[ 491.129901] env[61440]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridge'>' with name 'linux_bridge' {{(pid=61440) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 491.130275] env[61440]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=61440) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 491.130396] env[61440]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=61440) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 491.130684] env[61440]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 491.220373] env[61440]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61440) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 491.231093] env[61440]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=61440) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 491.368638] env[61440]: INFO nova.virt.driver [None req-fa7ee0da-d7d6-44e6-8c9c-5c4b31a9e5f9 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 491.442994] env[61440]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 491.443220] env[61440]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 491.443220] env[61440]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61440) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 494.645652] env[61440]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-284c1e68-6daf-43b8-b798-f77c78f95b72 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.661686] env[61440]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61440) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 494.661814] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2c840f5c-dcb1-447b-948c-350689c09e47 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.687366] env[61440]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 394fe.
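The "Running cmd (subprocess)" / 'CMD "..." returned' pair above is oslo.concurrency's processutils helper probing /sbin/iscsiadm for manual-scan support. A minimal sketch of issuing the same probe follows; the check_exit_code value is an assumption (grep exits 1 on no match, which should not be treated as an error), not something the log states:

    # Sketch: the subprocess probe logged above, via oslo.concurrency.
    # processutils.execute() itself emits the "Running cmd (subprocess)"
    # and 'CMD "..." returned' DEBUG lines.
    from oslo_concurrency import processutils

    out, _err = processutils.execute(
        'grep', '-F', 'node.session.scan', '/sbin/iscsiadm',
        check_exit_code=[0, 1])  # assumption: accept grep's "no match" exit
    supports_manual_scan = bool(out)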
[ 494.687475] env[61440]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.244s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 494.687987] env[61440]: INFO nova.virt.vmwareapi.driver [None req-fa7ee0da-d7d6-44e6-8c9c-5c4b31a9e5f9 None None] VMware vCenter version: 7.0.3
[ 494.691334] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638c9242-68f4-4ad9-99d6-551f90cdc1bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.711927] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d3e315-dace-4d09-8973-b6211270fbaa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.717764] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e076af5-d7fb-4147-8004-d420ece0d8d9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.724281] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fc3fd1-e315-41af-97c2-0598424dcf06 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.736925] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46eec741-6115-4c89-bc4b-2bc7a154ede0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.742704] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44c5ef6-435e-4dea-9757-98431be68b82 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.772290] env[61440]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-6182e27d-5b85-4bf9-8643-1d26bc0b09d0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.777188] env[61440]: DEBUG nova.virt.vmwareapi.driver [None req-fa7ee0da-d7d6-44e6-8c9c-5c4b31a9e5f9 None None] Extension org.openstack.compute already exists. {{(pid=61440) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 494.779847] env[61440]: INFO nova.compute.provider_config [None req-fa7ee0da-d7d6-44e6-8c9c-5c4b31a9e5f9 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
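Everything from the "oslo_vmware_api_lock" acquisition to the version line above is oslo.vmware's VMwareAPISession logging in to the vCenter named in the log. A minimal sketch of the same call; only the host name comes from the log, and the credentials, retry count, and poll interval below are placeholders:

    # Sketch: opening a vSphere session the way nova's VMware driver does.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',  # host, from the log above
        'user',                          # placeholder username
        'password',                      # placeholder password
        10,                              # placeholder api_retry_count
        0.5)                             # placeholder task_poll_interval
    # Reading the version from the retrieved service content is what backs
    # the "VMware vCenter version: 7.0.3" INFO line.
    print(session.vim.service_content.about.version)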
[ 494.799412] env[61440]: DEBUG nova.context [None req-fa7ee0da-d7d6-44e6-8c9c-5c4b31a9e5f9 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),999fff60-35ef-48e5-a3f5-8c3dd2fc923e(cell1) {{(pid=61440) load_cells /opt/stack/nova/nova/context.py:464}}
[ 494.801339] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.801566] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.802212] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 494.802642] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Acquiring lock "999fff60-35ef-48e5-a3f5-8c3dd2fc923e" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.802836] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Lock "999fff60-35ef-48e5-a3f5-8c3dd2fc923e" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.803843] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Lock "999fff60-35ef-48e5-a3f5-8c3dd2fc923e" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 494.823663] env[61440]: INFO dbcounter [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Registered counter for database nova_cell0
[ 494.831755] env[61440]: INFO dbcounter [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Registered counter for database nova_cell1
[ 494.834987] env[61440]: DEBUG oslo_db.sqlalchemy.engines [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61440) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 494.835363] env[61440]: DEBUG oslo_db.sqlalchemy.engines [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61440) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
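The acquire/release triples above show nova.context caching one database connection per cell (cell0 and cell1) under a per-cell lock. A simplified sketch of that get-or-set-under-lock pattern with oslo.concurrency; CELL_CACHE and connect() are illustrative stand-ins, not nova's real internals beyond what the lock names in the log show:

    # Sketch of the pattern behind the per-cell
    # "get_or_set_cached_cell_and_set_connections" lock messages above.
    from oslo_concurrency import lockutils

    CELL_CACHE = {}  # stand-in for nova's per-cell connection cache

    def get_or_set_cached_cell(cell_uuid, connect):
        # lockutils emits the acquire/release DEBUG lines of the kind
        # seen in the log.
        with lockutils.lock(cell_uuid):
            if cell_uuid not in CELL_CACHE:
                CELL_CACHE[cell_uuid] = connect(cell_uuid)
            return CELL_CACHE[cell_uuid]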
[ 494.839759] env[61440]: DEBUG dbcounter [-] [61440] Writer thread running {{(pid=61440) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 494.840862] env[61440]: DEBUG dbcounter [-] [61440] Writer thread running {{(pid=61440) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 494.842668] env[61440]: ERROR nova.db.main.api [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 494.842668] env[61440]: result = function(*args, **kwargs)
[ 494.842668] env[61440]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 494.842668] env[61440]: return func(*args, **kwargs)
[ 494.842668] env[61440]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 494.842668] env[61440]: result = fn(*args, **kwargs)
[ 494.842668] env[61440]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 494.842668] env[61440]: return f(*args, **kwargs)
[ 494.842668] env[61440]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 494.842668] env[61440]: return db.service_get_minimum_version(context, binaries)
[ 494.842668] env[61440]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 494.842668] env[61440]: _check_db_access()
[ 494.842668] env[61440]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 494.842668] env[61440]: stacktrace = ''.join(traceback.format_stack())
[ 494.842668] env[61440]:
[ 494.843699] env[61440]: ERROR nova.db.main.api [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 494.843699] env[61440]: result = function(*args, **kwargs)
[ 494.843699] env[61440]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 494.843699] env[61440]: return func(*args, **kwargs)
[ 494.843699] env[61440]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 494.843699] env[61440]: result = fn(*args, **kwargs)
[ 494.843699] env[61440]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 494.843699] env[61440]: return f(*args, **kwargs)
[ 494.843699] env[61440]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 494.843699] env[61440]: return db.service_get_minimum_version(context, binaries)
[ 494.843699] env[61440]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 494.843699] env[61440]: _check_db_access()
[ 494.843699] env[61440]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 494.843699] env[61440]: stacktrace = ''.join(traceback.format_stack())
[ 494.843699] env[61440]:
[ 494.844148] env[61440]: WARNING nova.objects.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Failed to get minimum service version for cell 999fff60-35ef-48e5-a3f5-8c3dd2fc923e
[ 494.844229] env[61440]: WARNING nova.objects.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 494.844644] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Acquiring lock "singleton_lock" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
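The two ERROR tracebacks above are intentional, not crashes: nova-compute forbids direct main-database access, and the guard logs the offending call stack before the caller falls back, which is why the two "Failed to get minimum service version" warnings follow instead of a failure. A stripped-down sketch of such a guard, using only the names visible in the traceback plus an illustrative DISABLE_DB_ACCESS flag and exception type:

    # Sketch of the guard at nova/db/main/api.py seen in the traceback.
    # DISABLE_DB_ACCESS and RuntimeError are illustrative; the shape
    # (log the stack, then refuse the call) matches the log above.
    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISABLE_DB_ACCESS = True  # set in nova-compute processes

    def _check_db_access():
        if DISABLE_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise RuntimeError('No DB access allowed in nova-compute')

    def wrapper(f):
        # applied to each DB API call, cf. api.py line 238 in the traceback
        def inner(*args, **kwargs):
            _check_db_access()
            return f(*args, **kwargs)
        return inner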
[ 494.844807] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Acquired lock "singleton_lock" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 494.845060] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Releasing lock "singleton_lock" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 494.845391] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Full set of CONF: {{(pid=61440) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 494.845534] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ******************************************************************************** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 494.845663] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] Configuration options gathered from: {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 494.845799] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 494.845996] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 494.846139] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ================================================================================ {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 494.846353] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] allow_resize_to_same_host = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.846525] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] arq_binding_timeout = 300 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.846658] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] backdoor_port = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.846785] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] backdoor_socket = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.846952] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] block_device_allocate_retries = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.847130] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] block_device_allocate_retries_interval = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.847303] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cert = self.pem {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.847476] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.847635] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute_monitors = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.847797] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] config_dir = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.847967] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] config_drive_format = iso9660 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.848117] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.848283] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] config_source = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.848451] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] console_host = devstack {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.848616] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] control_exchange = nova {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.848774] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cpu_allocation_ratio = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.848934] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] daemon = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.849123] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] debug = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.849283] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] default_access_ip_network_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.849448] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] default_availability_zone = nova {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.849605] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] default_ephemeral_format = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.849767] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] default_green_pool_size = 1000 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.849998] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.850182] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] default_schedule_zone = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.850342] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] disk_allocation_ratio = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.850511] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] enable_new_services = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.850679] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] enabled_apis = ['osapi_compute'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.850843] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] enabled_ssl_apis = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.851010] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] flat_injected = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.851180] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] force_config_drive = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.851338] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] force_raw_images = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.851508] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] graceful_shutdown_timeout = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.851669] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] heal_instance_info_cache_interval = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.851876] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] host = cpu-1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.852061] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.852228] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.852389] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.852602] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.852768] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_build_timeout = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.852931] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_delete_interval = 300 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.853117] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_format = [instance: %(uuid)s] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.853289] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_name_template = instance-%08x {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.853450] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_usage_audit = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.853620] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_usage_audit_period = month {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.853786] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.853952] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.854138] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] internal_service_availability_zone = internal {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.854297] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] key = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.854457] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] live_migration_retry_count = 30 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.854619] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_config_append = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.854786] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.854945] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_dir = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.855119] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_file = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.855251] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_options = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.855415] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_rotate_interval = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.855585] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_rotate_interval_type = days {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.855755] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] log_rotation_type = none {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.855886] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856027] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856199] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856372] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856502] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856667] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] long_rpc_timeout = 1800 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856827] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] max_concurrent_builds = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.856989] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] max_concurrent_live_migrations = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.857191] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] max_concurrent_snapshots = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.857354] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] max_local_block_devices = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.857516] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] max_logfile_count = 30 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.857675] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] max_logfile_size_mb = 200 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.857835] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] maximum_instance_delete_attempts = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.858009] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] metadata_listen = 0.0.0.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.858192] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] metadata_listen_port = 8775 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.858362] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] metadata_workers = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.858527] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] migrate_max_retries = -1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.858699] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] mkisofs_cmd = genisoimage {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.858905] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.859051] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] my_ip = 10.180.1.21 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.859224] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] network_allocate_retries = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.859404] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.859573] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.859738] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] osapi_compute_listen_port = 8774 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.859907] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] osapi_compute_unique_server_name_scope = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.860086] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] osapi_compute_workers = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.860255] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] password_length = 12 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.860418] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] periodic_enable = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.860579] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] periodic_fuzzy_delay = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.860747] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] pointer_model = usbtablet {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.860912] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] preallocate_images = none {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.861090] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] publish_errors = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.861225] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] pybasedir = /opt/stack/nova {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.861389] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ram_allocation_ratio = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.861553] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] rate_limit_burst = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.861720] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] rate_limit_except_level = CRITICAL {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.861882] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] rate_limit_interval = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.862055] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reboot_timeout = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.862223] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reclaim_instance_interval = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.862383] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] record = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.862551] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reimage_timeout_per_gb = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.862715] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] report_interval = 120 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.862875] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] rescue_timeout = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863042] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reserved_host_cpus = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863206] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reserved_host_disk_mb = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863362] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reserved_host_memory_mb = 512 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863518] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] reserved_huge_pages = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863678] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] resize_confirm_window = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863835] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] resize_fs_using_block_device = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.863991] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] resume_guests_state_on_host_boot = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.864237] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.864408] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] rpc_response_timeout = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.864570] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] run_external_periodic_tasks = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.864739] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] running_deleted_instance_action = reap {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.864901] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.865070] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] running_deleted_instance_timeout = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.865233] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler_instance_sync_interval = 120 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.865399] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] service_down_time = 720 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.865568] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] servicegroup_driver = db {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866046] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] shelved_offload_time = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866046] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] shelved_poll_interval = 3600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866046] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] shutdown_timeout = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866212] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] source_is_ipv6 = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866370] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ssl_only = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866612] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866778] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] sync_power_state_interval = 600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.866940] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] sync_power_state_pool_size = 1000 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.867123] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] syslog_log_facility = LOG_USER {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.867281] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] tempdir = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.867439] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] timeout_nbd = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.867605] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] transport_url = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.867764] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] update_resources_interval = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.867926] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_cow_images = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.868099] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_eventlog = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.868264] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_journal = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.868422] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_json = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.868582] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_rootwrap_daemon = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.868740] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_stderr = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.868896] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] use_syslog = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.869063] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vcpu_pin_set = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.869236] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vif_plugging_is_fatal = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.869403] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vif_plugging_timeout = 300 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.869566] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] virt_mkfs = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.869726] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] volume_usage_poll_interval = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.869888] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] watch_log_file = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.870066] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] web = /usr/share/spice-html5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 494.870260] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_concurrency.disable_process_locking = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.870549] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.870731] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.870899] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.871085] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.871262] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.871428] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.871609] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.auth_strategy = keystone {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.871777] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.compute_link_prefix = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.871952] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.872145] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.dhcp_domain = novalocal {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.872318] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.enable_instance_password = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.872483] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.glance_link_prefix = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.872659] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.872832] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.872994] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.instance_list_per_project_cells = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.873259] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.list_records_by_skipping_down_cells = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.873425] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.local_metadata_per_cell = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.873593] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.max_limit = 1000 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.873762] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.metadata_cache_expiration = 15 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.873939] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.neutron_default_tenant_id = default {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.874333] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.use_neutron_default_nets = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.874333] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.874455] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.874808] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.875029] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.875216] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_dynamic_targets = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.875389] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_jsonfile_path = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.875575] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.875772] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.backend = dogpile.cache.memcached {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.875946] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.backend_argument = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.876135] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.config_prefix = cache.oslo {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.876308] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.dead_timeout = 60.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.876475] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.debug_cache_backend = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.876642] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.enable_retry_client = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.876804] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.enable_socket_keepalive = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.876978] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.enabled = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.877156] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.enforce_fips_mode = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.877321] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.expiration_time = 600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.877485] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.hashclient_retry_attempts = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.877651] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.877813] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_dead_retry = 300 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.877971] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_password = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.878151] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.878319] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.878482] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_pool_maxsize = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.878645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.878809] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_sasl_enabled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.878994] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.879180] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.879345] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.memcache_username = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.879511] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.proxies = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.879672] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.redis_password = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.879845] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.880030] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.880208] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.redis_server = localhost:6379 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.880373] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.redis_socket_timeout = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 494.880535] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.redis_username = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.880699] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.retry_attempts = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.880864] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.retry_delay = 0.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.881041] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.socket_keepalive_count = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.881214] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.socket_keepalive_idle = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.881378] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.socket_keepalive_interval = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.881538] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.tls_allowed_ciphers = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.881706] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.tls_cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.881869] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.tls_certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.882052] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.tls_enabled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.882223] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cache.tls_keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.882396] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.auth_section = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.882572] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.auth_type = password {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.882735] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.882914] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.883091] env[61440]: DEBUG oslo_service.service 
[None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.883268] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.883435] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.cross_az_attach = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.883609] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.debug = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.883766] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.endpoint_template = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.883935] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.http_retries = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.884116] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.884282] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.884455] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.os_region_name = RegionOne {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.884623] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.884783] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cinder.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.884957] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.885142] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.cpu_dedicated_set = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.885303] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.cpu_shared_set = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.885471] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.image_type_exclude_list = [] {{(pid=61440) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.885637] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.885801] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.885977] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.886154] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.886328] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.886495] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.resource_provider_association_refresh = 300 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.886660] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.886827] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.shutdown_retry_interval = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.887018] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.887205] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] conductor.workers = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.887387] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] console.allowed_origins = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.887552] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] console.ssl_ciphers = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.887725] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] console.ssl_minimum_version = default {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.887895] env[61440]: DEBUG oslo_service.service [None 
req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] consoleauth.enforce_session_timeout = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.888083] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] consoleauth.token_ttl = 600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.888257] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.888417] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.888584] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.888743] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.connect_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.888902] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.connect_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.889072] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.endpoint_override = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.889243] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.889403] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.889564] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.max_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.889722] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.min_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.889880] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.region_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.890047] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.retriable_status_codes = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.890216] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.service_name = None {{(pid=61440) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.890390] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.service_type = accelerator {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.890555] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.890716] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.status_code_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.890874] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.status_code_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.891042] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.891234] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.891398] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] cyborg.version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.891580] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.backend = sqlalchemy {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.891752] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.connection = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.891920] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.connection_debug = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.892107] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.connection_parameters = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.892276] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.connection_recycle_time = 3600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.892443] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.connection_trace = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.892606] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.db_inc_retry_interval = True {{(pid=61440) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.892771] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.db_max_retries = 20 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.892935] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.db_max_retry_interval = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.893114] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.db_retry_interval = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.893280] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.max_overflow = 50 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.893443] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.max_pool_size = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.893605] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.max_retries = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.893777] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.893937] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.mysql_wsrep_sync_wait = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.894110] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.pool_timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.894277] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.retry_interval = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.894477] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.slave_connection = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.894643] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.sqlite_synchronous = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.894809] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] database.use_db_reconnect = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.895031] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.backend = sqlalchemy {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
494.895228] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.connection = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898410] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.connection_debug = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898410] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.connection_parameters = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898410] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.connection_recycle_time = 3600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898410] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.connection_trace = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898410] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.db_inc_retry_interval = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898410] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.db_max_retries = 20 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.db_max_retry_interval = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.db_retry_interval = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.max_overflow = 50 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.max_pool_size = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.max_retries = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898645] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898823] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898823] 
env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.pool_timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898823] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.retry_interval = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898823] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.slave_connection = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898823] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] api_database.sqlite_synchronous = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.898823] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] devices.enabled_mdev_types = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899012] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899012] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899012] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ephemeral_storage_encryption.enabled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899012] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899137] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.api_servers = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899296] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899455] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899638] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899816] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.connect_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.899994] env[61440]: DEBUG 
oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.connect_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.900177] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.debug = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.900347] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.default_trusted_certificate_ids = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.900512] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.enable_certificate_validation = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.900677] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.enable_rbd_download = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.900840] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.endpoint_override = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.901014] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.901198] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.901365] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.max_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.901526] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.min_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.901690] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.num_retries = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.901864] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.rbd_ceph_conf = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.902039] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.rbd_connect_timeout = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.902222] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.rbd_pool = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.902394] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.rbd_user = {{(pid=61440) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.902558] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.region_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.902720] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.retriable_status_codes = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.902883] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.service_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.903068] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.service_type = image {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.903242] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.903403] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.status_code_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.903560] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.status_code_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.903718] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.903899] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.904077] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.verify_glance_signatures = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.904247] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] glance.version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.904418] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] guestfs.debug = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.904595] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] mks.enabled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.904947] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.905156] 
env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] image_cache.manager_interval = 2400 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.905332] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] image_cache.precache_concurrency = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.905507] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] image_cache.remove_unused_base_images = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.905677] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.905846] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.906038] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] image_cache.subdirectory_name = _base {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.906223] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.api_max_retries = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.906392] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.api_retry_interval = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.906553] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.auth_section = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.906716] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.auth_type = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.906880] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.907053] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.907227] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.907395] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.conductor_group = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.907554] env[61440]: DEBUG 
oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.connect_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.907714] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.connect_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.907874] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.endpoint_override = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.908050] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.908216] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.908377] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.max_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.908536] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.min_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.908701] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.peer_list = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.908861] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.region_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.909029] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.retriable_status_codes = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.909200] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.serial_console_state_timeout = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.909361] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.service_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.909533] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.service_type = baremetal {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.909693] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.shard = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.909858] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.split_loggers = False {{(pid=61440) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.910029] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.status_code_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.910195] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.status_code_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.910356] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.910538] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.910699] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ironic.version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.910884] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.911071] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] key_manager.fixed_key = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.911261] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.911426] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.barbican_api_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.911587] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.barbican_endpoint = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.911759] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.barbican_endpoint_type = public {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.911921] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.barbican_region_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.912093] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.912256] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.certfile = None {{(pid=61440) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.912420] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.912583] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.912740] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.912903] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.number_of_retries = 60 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.913094] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.retry_delay = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.913270] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.send_service_user_token = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.913435] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.913595] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.913760] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.verify_ssl = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.913920] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican.verify_ssl_path = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.914104] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.auth_section = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.914271] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.auth_type = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.914431] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.914590] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
494.914756] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.914919] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.915099] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.915274] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.915434] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] barbican_service_user.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.915604] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.approle_role_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.915766] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.approle_secret_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.915929] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.916098] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.916271] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.916434] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.916591] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.916770] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.kv_mountpoint = secret {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.916932] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.kv_path = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.917113] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None 
None] vault.kv_version = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.917277] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.namespace = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.917440] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.root_token_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.917603] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.917764] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.ssl_ca_crt_file = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.917924] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.918104] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.use_ssl = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.918282] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.918455] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.auth_section = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.918619] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.auth_type = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.918779] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.918938] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.919116] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.919276] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.connect_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.919433] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.connect_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
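
The vault.* entries above are Castellan's HashiCorp Vault key-manager backend options, logged here at their defaults. A minimal sketch of the equivalent nova.conf stanza, reconstructed only from the values in the dump (the AppRole credentials stay unset, exactly as logged):

    [vault]
    vault_url = http://127.0.0.1:8200
    use_ssl = False
    kv_mountpoint = secret
    kv_version = 2
    # approle_role_id / approle_secret_id are None in the dump; set both only
    # when authenticating to Vault via AppRole instead of a root token.

The keystone.* records that follow are the standard keystoneauth adapter options for the identity endpoint (service_type = identity, valid_interfaces = internal,public); the rest of that group is unset.
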
494.919593] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.endpoint_override = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.919753] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.919910] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.920076] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.max_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.920237] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.min_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.920396] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.region_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.920551] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.retriable_status_codes = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.920708] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.service_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.920876] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.service_type = identity {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.921052] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.921223] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.status_code_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.921383] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.status_code_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.921541] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.921723] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.921887] env[61440]: DEBUG oslo_service.service [None 
req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] keystone.version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.922104] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.connection_uri = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.922277] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_mode = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.922447] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.922620] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_models = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.922794] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_power_governor_high = performance {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.922968] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.923152] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_power_management = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.923327] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.923496] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.device_detach_attempts = 8 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.923661] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.device_detach_timeout = 20 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.923826] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.disk_cachemodes = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.923988] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.disk_prefix = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.924171] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.enabled_perf_events = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.924337] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] 
libvirt.file_backed_memory = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.924501] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.gid_maps = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.924663] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.hw_disk_discard = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.924831] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.hw_machine_type = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.925014] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_rbd_ceph_conf = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.925187] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.925353] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.925522] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_rbd_glance_store_name = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.925691] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_rbd_pool = rbd {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.925862] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_type = default {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.926055] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.images_volume_group = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.926238] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.inject_key = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.926405] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.inject_partition = -2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.926568] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.inject_password = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.926732] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.iscsi_iface = None {{(pid=61440) log_opt_values 
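
The libvirt.images_* options above select the backend for instance ephemeral disks; images_type = default in the dump means the plain local file backend (raw/qcow2). For contrast, a Ceph-backed layout would look roughly like the sketch below. Everything except the option names is an illustrative assumption; the dump itself shows the defaults (pool rbd, empty ceph conf, and rbd_user / rbd_secret_uuid logged as None further down):

    [libvirt]
    images_type = rbd
    images_rbd_pool = vms                        # assumption: site-specific pool; the logged default is "rbd"
    images_rbd_ceph_conf = /etc/ceph/ceph.conf   # assumption: this path is empty in the dump
    rbd_user = cinder                            # assumption: None in the dump
    rbd_secret_uuid = <uuid-of-libvirt-secret>   # placeholder, not from the dump
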
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.926897] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.iser_use_multipath = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.927088] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.927273] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.927441] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_downtime = 500 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.927603] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.927764] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.927924] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_inbound_addr = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.928109] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.928275] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.928441] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_scheme = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.928616] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_timeout_action = abort {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.928783] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_tunnelled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.928943] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.live_migration_uri = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.929122] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.929285] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.max_queues = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.929448] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.929680] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.929843] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.nfs_mount_options = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.930148] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.930329] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.930496] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.930655] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.930818] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.930980] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.num_pcie_ports = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.931165] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.931331] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.pmem_namespaces = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.931491] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.quobyte_client_cfg = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.931776] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.931950] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.932132] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.932298] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.932459] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rbd_secret_uuid = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.932617] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rbd_user = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.932781] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.932954] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.933130] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rescue_image_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.933291] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rescue_kernel_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.933448] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rescue_ramdisk_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.933616] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.933773] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.rx_queue_size = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.933942] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.smbfs_mount_options = {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.934232] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.934411] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.snapshot_compression = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.934577] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.snapshot_image_format = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.934797] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.934967] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.sparse_logical_volumes = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.935149] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.swtpm_enabled = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.935324] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.swtpm_group = tss {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.935493] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.swtpm_user = tss {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.935666] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.sysinfo_serial = unique {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.935826] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.tb_cache_size = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.936017] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.tx_queue_size = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.936200] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.uid_maps = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.936370] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.use_virtio_for_bridges = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.936546] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.virt_type = kvm {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.936717] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.volume_clear = zero 
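
Taken together, the live_migration_* values logged above form the stock migration profile: a migration is aborted (live_migration_timeout_action = abort) if it has not converged within live_migration_completion_timeout = 800 s, the permitted guest pause grows toward live_migration_downtime = 500 ms across live_migration_downtime_steps = 10 increments paced by live_migration_downtime_delay = 75 (a wait factor scaled roughly by guest size), and both auto-converge and post-copy remain off. A sketch of that stanza exactly as logged:

    [libvirt]
    virt_type = kvm
    live_migration_completion_timeout = 800
    live_migration_downtime = 500
    live_migration_downtime_steps = 10
    live_migration_downtime_delay = 75
    live_migration_permit_auto_converge = False
    live_migration_permit_post_copy = False
    live_migration_timeout_action = abort
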
{{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.936882] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.volume_clear_size = 0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.937078] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.volume_use_multipath = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.937265] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_cache_path = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.937440] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.937611] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.937779] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.937950] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.938242] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.938423] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.vzstorage_mount_user = stack {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.938590] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.938763] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.auth_section = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.938939] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.auth_type = password {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.939117] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.939281] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.certfile = None 
{{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.939447] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.939605] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.connect_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.939764] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.connect_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.939936] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.default_floating_pool = public {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.940110] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.endpoint_override = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.940275] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.extension_sync_interval = 600 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.940439] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.http_retries = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.940603] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.940765] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.940927] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.max_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.941113] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.941275] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.min_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.941446] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.ovs_bridge = br-int {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.941610] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.physnets = [] {{(pid=61440) log_opt_values 
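
The neutron.* block above mixes keystoneauth credentials for the networking service with Nova-side knobs: password auth, three HTTP retries toward the Neutron API, the integration bridge br-int used for VIF plugging, and the (masked) metadata proxy shared secret. Reconstructed from the logged values:

    [neutron]
    auth_type = password
    http_retries = 3
    default_floating_pool = public
    ovs_bridge = br-int
    metadata_proxy_shared_secret = ****   # masked in the log; must match the secret configured in neutron's metadata agent
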
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.941783] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.region_name = RegionOne {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.941944] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.retriable_status_codes = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.942130] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.service_metadata_proxy = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.942292] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.service_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.942463] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.service_type = network {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.942624] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.942783] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.status_code_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.942943] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.status_code_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.943120] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.943306] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.943469] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] neutron.version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.943644] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] notifications.bdms_in_notifications = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.943825] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] notifications.default_level = INFO {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.944010] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] notifications.notification_format = unversioned {{(pid=61440) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.944188] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] notifications.notify_on_state_change = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.944369] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.944544] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] pci.alias = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.944715] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] pci.device_spec = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.944882] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] pci.report_in_placement = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.945066] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.auth_section = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.945252] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.auth_type = password {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.945423] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.945586] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.cafile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.945744] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.certfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.945907] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.collect_timing = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.946101] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.connect_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.946269] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.connect_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.946429] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.default_domain_id = None {{(pid=61440) log_opt_values 
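
pci.alias and pci.device_spec are both empty above, so no PCI passthrough is configured on this node. For reference, each option takes one JSON object per entry when populated; the vendor/product IDs below are purely illustrative and do not come from the dump:

    [pci]
    device_spec = {"vendor_id": "8086", "product_id": "10fb"}
    alias = {"vendor_id": "8086", "product_id": "10fb", "device_type": "type-PF", "name": "x520"}
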
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.946588] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.default_domain_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.946746] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.domain_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.946904] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.domain_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.947094] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.endpoint_override = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.947282] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.insecure = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.947445] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.keyfile = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.947605] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.max_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.947761] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.min_version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.947930] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.password = **** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.948105] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.project_domain_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.948276] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.project_domain_name = Default {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.948444] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.project_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.948618] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.project_name = service {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.948791] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.region_name = RegionOne {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.948954] 
env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.retriable_status_codes = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.949133] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.service_name = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.949303] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.service_type = placement {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.949491] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.split_loggers = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.949666] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.status_code_retries = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.949831] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.status_code_retry_delay = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950018] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.system_scope = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950180] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.timeout = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950339] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.trust_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950495] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.user_domain_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950665] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.user_domain_name = Default {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950823] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.user_id = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.950996] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.username = placement {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.951195] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.951356] env[61440]: DEBUG oslo_service.service [None 
req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] placement.version = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.951534] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.cores = 20 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.951703] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.count_usage_from_placement = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.951878] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.952065] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.injected_file_content_bytes = 10240 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.952240] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.injected_file_path_length = 255 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.952406] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.injected_files = 5 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.952572] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.instances = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.952736] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.key_pairs = 100 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.952901] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.metadata_items = 128 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.953075] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.ram = 51200 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.953246] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.recheck_quota = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.953413] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.server_group_members = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.953578] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] quota.server_groups = 10 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.953751] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61440) log_opt_values 
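
The placement.* block above is the keystoneauth credential set nova-compute uses to reach the placement API, and quota.* holds the effective per-project limits (20 cores, 10 instances, 51200 MiB of RAM, with recheck_quota = True re-validating limits after resource creation). Both stanzas, reconstructed from the logged values:

    [placement]
    auth_type = password
    auth_url = http://10.180.1.21/identity
    username = placement
    password = ****              # masked in the log
    project_name = service
    project_domain_name = Default
    user_domain_name = Default
    region_name = RegionOne
    valid_interfaces = internal,public

    [quota]
    cores = 20
    instances = 10
    ram = 51200
    key_pairs = 100
    server_groups = 10
    server_group_members = 10
    recheck_quota = True
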
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.953917] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.954093] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.image_metadata_prefilter = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.954263] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.954429] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.max_attempts = 3 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.954596] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.max_placement_results = 1000 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.954760] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.954924] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.955098] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.955279] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] scheduler.workers = 2 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.955460] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.955633] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
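
The scheduler.* entries above show two scheduler workers, up to three scheduling attempts per request, at most 1000 candidate allocations requested from placement, and periodic host discovery disabled (discover_hosts_in_cells_interval = -1, so new computes are mapped manually, e.g. with nova-manage cell_v2 discover_hosts). As a stanza:

    [scheduler]
    workers = 2
    max_attempts = 3
    max_placement_results = 1000
    discover_hosts_in_cells_interval = -1
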
{{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.955810] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.956014] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.956205] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.956377] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.956541] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.956735] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.956905] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.host_subset_size = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.957109] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.957292] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.957460] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.957626] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.isolated_hosts = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.957790] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.isolated_images = [] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.957953] env[61440]: DEBUG oslo_service.service [None 
req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.958135] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.958308] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.958475] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.pci_in_placement = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.958638] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.958799] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.958965] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.959137] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.959302] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.959464] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.959627] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.track_instance_changes = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.959804] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.959976] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] metrics.required = True {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.960156] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] metrics.weight_multiplier = 1.0 
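Every record in this dump comes from a single call: oslo.config's ConfigOpts.log_opt_values() (the log_opt_values at cfg.py:2620 cited in each record), which the service invokes once at startup to print every registered option, group by group. A minimal standalone sketch of that mechanism, assuming only that oslo.config is installed; the option names and defaults are copied from the scheduler records above, and this is not Nova's actual startup code:

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [cfg.IntOpt('max_attempts', default=3),
         cfg.IntOpt('workers', default=2)],
        group='scheduler')

    CONF([])  # parse an empty argument list; defaults apply
    # Emits "scheduler.max_attempts = 3" and "scheduler.workers = 2" in the
    # same "<group>.<option> = <value>" form seen throughout this dump.
    CONF.log_opt_values(LOG, logging.DEBUG)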
[ 494.960319] metrics.weight_of_unavailable = -10000.0
[ 494.960483] metrics.weight_setting = []
[ 494.960801] serial_console.base_url = ws://127.0.0.1:6083/
[ 494.960981] serial_console.enabled = False
[ 494.961206] serial_console.port_range = 10000:20000
[ 494.961401] serial_console.proxyclient_address = 127.0.0.1
[ 494.961577] serial_console.serialproxy_host = 0.0.0.0
[ 494.961745] serial_console.serialproxy_port = 6083
[ 494.961914] service_user.auth_section = None
[ 494.962104] service_user.auth_type = password
[ 494.962275] service_user.cafile = None
[ 494.962437] service_user.certfile = None
[ 494.962601] service_user.collect_timing = False
[ 494.962766] service_user.insecure = False
[ 494.962926] service_user.keyfile = None
[ 494.963122] service_user.send_service_user_token = True
[ 494.963294] service_user.split_loggers = False
[ 494.963458] service_user.timeout = None
[ 494.963645] spice.agent_enabled = True
[ 494.963813] spice.enabled = False
[ 494.964148] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html
[ 494.964348] spice.html5proxy_host = 0.0.0.0
[ 494.964520] spice.html5proxy_port = 6082
[ 494.964685] spice.image_compression = None
[ 494.964846] spice.jpeg_compression = None
[ 494.965012] spice.playback_compression = None
[ 494.965192] spice.server_listen = 127.0.0.1
[ 494.965363] spice.server_proxyclient_address = 127.0.0.1
[ 494.965523] spice.streaming_mode = None
[ 494.965683] spice.zlib_compression = None
[ 494.965850] upgrade_levels.baseapi = None
[ 494.966059] upgrade_levels.compute = auto
[ 494.966237] upgrade_levels.conductor = None
[ 494.966401] upgrade_levels.scheduler = None
[ 494.966570] vendordata_dynamic_auth.auth_section = None
[ 494.966737] vendordata_dynamic_auth.auth_type = None
[ 494.966901] vendordata_dynamic_auth.cafile = None
[ 494.967089] vendordata_dynamic_auth.certfile = None
[ 494.967274] vendordata_dynamic_auth.collect_timing = False
[ 494.967442] vendordata_dynamic_auth.insecure = False
[ 494.967603] vendordata_dynamic_auth.keyfile = None
[ 494.967768] vendordata_dynamic_auth.split_loggers = False
[ 494.967927] vendordata_dynamic_auth.timeout = None
[ 494.968117] vmware.api_retry_count = 10
[ 494.968285] vmware.ca_file = None
[ 494.968460] vmware.cache_prefix = devstack-image-cache
[ 494.968630] vmware.cluster_name = testcl1
[ 494.968796] vmware.connection_pool_size = 10
[ 494.968957] vmware.console_delay_seconds = None
[ 494.969145] vmware.datastore_regex = ^datastore.*
[ 494.969349] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap
[ 494.969524] vmware.host_password = ****
[ 494.969693] vmware.host_port = 443
[ 494.969862] vmware.host_username = administrator@vsphere.local
[ 494.970044] vmware.insecure = True
[ 494.970217] vmware.integration_bridge = None
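The vmware.datastore_regex value above (^datastore.*) restricts which datastores the driver will consider, matched against datastore names. A quick illustration of the matching, where the datastore names are made up and only the regex comes from the log:

    import re

    datastore_regex = re.compile(r'^datastore.*')
    names = ['datastore1', 'datastore2-ssd', 'backup-lun']  # hypothetical names
    print([n for n in names if datastore_regex.match(n)])
    # ['datastore1', 'datastore2-ssd']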
[ 494.970385] vmware.maximum_objects = 100
[ 494.970545] vmware.pbm_default_policy = None
[ 494.970710] vmware.pbm_enabled = False
[ 494.970872] vmware.pbm_wsdl_location = None
[ 494.971056] vmware.serial_log_dir = /opt/vmware/vspc
[ 494.971224] vmware.serial_port_proxy_uri = None
[ 494.971387] vmware.serial_port_service_uri = None
[ 494.971554] vmware.task_poll_interval = 0.5
[ 494.971732] vmware.use_linked_clone = False
[ 494.971902] vmware.vnc_keymap = en-us
[ 494.972079] vmware.vnc_port = 5900
[ 494.972251] vmware.vnc_port_total = 10000
[ 494.972439] vnc.auth_schemes = ['none']
[ 494.972615] vnc.enabled = False
[ 494.972919] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html
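Note that vmware.host_password is printed as **** a few records up while its neighbours show real values: oslo.config masks any option declared with secret=True when log_opt_values() runs. A small standalone sketch of that behaviour, with an invented default password; these are not Nova's actual option definitions:

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [cfg.StrOpt('host_username', default='administrator@vsphere.local'),
         cfg.StrOpt('host_password', default='not-a-real-password',  # hypothetical
                    secret=True)],
        group='vmware')

    CONF([])
    CONF.log_opt_values(LOG, logging.DEBUG)  # host_password is logged as '****'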
[ 494.973117] vnc.novncproxy_host = 0.0.0.0
[ 494.973296] vnc.novncproxy_port = 6080
[ 494.973473] vnc.server_listen = 127.0.0.1
[ 494.973707] vnc.server_proxyclient_address = 127.0.0.1
[ 494.973894] vnc.vencrypt_ca_certs = None
[ 494.974072] vnc.vencrypt_client_cert = None
[ 494.974242] vnc.vencrypt_client_key = None
[ 494.974427] workarounds.disable_compute_service_check_for_ffu = False
[ 494.974595] workarounds.disable_deep_image_inspection = False
[ 494.974760] workarounds.disable_fallback_pcpu_query = False
[ 494.974925] workarounds.disable_group_policy_check_upcall = False
[ 494.975105] workarounds.disable_libvirt_livesnapshot = False
[ 494.975275] workarounds.disable_rootwrap = False
[ 494.975439] workarounds.enable_numa_live_migration = False
[ 494.975602] workarounds.enable_qemu_monitor_announce_self = False
[ 494.975765] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False
[ 494.975951] workarounds.handle_virt_lifecycle_events = True
[ 494.976133] workarounds.libvirt_disable_apic = False
[ 494.976303] workarounds.never_download_image_if_on_rbd = False
[ 494.976469] workarounds.qemu_monitor_announce_self_count = 3
[ 494.976632] workarounds.qemu_monitor_announce_self_interval = 1
[ 494.976795] workarounds.reserve_disk_resource_for_image_cache = False
[ 494.976961] workarounds.skip_cpu_compare_at_startup = False
[ 494.977158] workarounds.skip_cpu_compare_on_dest = False
[ 494.977331] workarounds.skip_hypervisor_version_check_on_lm = False
[ 494.977495] workarounds.skip_reserve_in_use_ironic_nodes = False
[ 494.977659] workarounds.unified_limits_count_pcpu_as_vcpu = False
[ 494.977824] workarounds.wait_for_vif_plugged_event_during_hard_reboot = []
[ 494.978017] wsgi.api_paste_config = /etc/nova/api-paste.ini
[ 494.978196] wsgi.client_socket_timeout = 900
[ 494.978370] wsgi.default_pool_size = 1000
[ 494.978538] wsgi.keep_alive = True
[ 494.978705] wsgi.max_header_line = 16384
[ 494.978869] wsgi.secure_proxy_ssl_header = None
[ 494.979044] wsgi.ssl_ca_file = None
[ 494.979213] wsgi.ssl_cert_file = None
[ 494.979373] wsgi.ssl_key_file = None
[ 494.979539] wsgi.tcp_keepidle = 600
[ 494.979716] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f
[ 494.979886] zvm.ca_file = None
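The wsgi.wsgi_log_format record above is an old-style Python %-format string filled from a per-request dict. A quick demonstration with invented request values; only the format string itself comes from the log:

    fmt = ('%(client_ip)s "%(request_line)s" status: %(status_code)s '
           'len: %(body_length)s time: %(wall_seconds).7f')
    print(fmt % {'client_ip': '192.0.2.10',
                 'request_line': 'GET /v2.1/servers HTTP/1.1',
                 'status_code': 200,
                 'body_length': 1834,
                 'wall_seconds': 0.0412345})
    # 192.0.2.10 "GET /v2.1/servers HTTP/1.1" status: 200 len: 1834 time: 0.0412345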
[ 494.980057] zvm.cloud_connector_url = None
[ 494.980343] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images
[ 494.980518] zvm.reachable_timeout = 300
[ 494.980700] oslo_policy.enforce_new_defaults = True
[ 494.980870] oslo_policy.enforce_scope = True
[ 494.981055] oslo_policy.policy_default_rule = default
[ 494.981241] oslo_policy.policy_dirs = ['policy.d']
[ 494.981416] oslo_policy.policy_file = policy.yaml
[ 494.981588] oslo_policy.remote_content_type = application/x-www-form-urlencoded
[ 494.981747] oslo_policy.remote_ssl_ca_crt_file = None
[ 494.981904] oslo_policy.remote_ssl_client_crt_file = None
[ 494.982074] oslo_policy.remote_ssl_client_key_file = None
[ 494.982239] oslo_policy.remote_ssl_verify_server_crt = False
[ 494.982408] oslo_versionedobjects.fatal_exception_format_errors = False
[ 494.982582] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd
[ 494.982759] profiler.connection_string = messaging://
[ 494.982928] profiler.enabled = False
[ 494.983115] profiler.es_doc_type = notification
[ 494.983281] profiler.es_scroll_size = 10000
[ 494.983450] profiler.es_scroll_time = 2m
[ 494.983623] profiler.filter_error_trace = False
[ 494.983785] profiler.hmac_keys = ****
[ 494.983957] profiler.sentinel_service_name = mymaster
[ 494.984139] profiler.socket_timeout = 0.1
[ 494.984306] profiler.trace_requests = False
[ 494.984470] profiler.trace_sqlalchemy = False
[ 494.984655] profiler_jaeger.process_tags = {}
[ 494.984819] profiler_jaeger.service_name_prefix = None
[ 494.984982] profiler_otlp.service_name_prefix = None
[ 494.985165] remote_debug.host = None
[ 494.985325] remote_debug.port = None
[ 494.985507] oslo_messaging_rabbit.amqp_auto_delete = False
[ 494.985671] oslo_messaging_rabbit.amqp_durable_queues = False
[ 494.985836] oslo_messaging_rabbit.conn_pool_min_size = 2
[ 494.986029] oslo_messaging_rabbit.conn_pool_ttl = 1200
[ 494.986211] oslo_messaging_rabbit.direct_mandatory_flag = True
[ 494.986376] oslo_messaging_rabbit.enable_cancel_on_failover = False
[ 494.986537] oslo_messaging_rabbit.heartbeat_in_pthread = False
[ 494.986700] oslo_messaging_rabbit.heartbeat_rate = 3
[ 494.986862] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60
[ 494.987066] oslo_messaging_rabbit.hostname = devstack
[ 494.987288] oslo_messaging_rabbit.kombu_compression = None
[ 494.987484] oslo_messaging_rabbit.kombu_failover_strategy = round-robin
[ 494.987657] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60
[ 494.987830] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0
[ 494.988009] oslo_messaging_rabbit.processname = nova-compute
[ 494.988194] oslo_messaging_rabbit.rabbit_ha_queues = False
[ 494.988359] oslo_messaging_rabbit.rabbit_interval_max = 30
[ 494.988535] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN
[ 494.988700] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0
[ 494.988865] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0
[ 494.989041] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0
[ 494.989214] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0
[ 494.989380] oslo_messaging_rabbit.rabbit_quorum_queue = False
[ 494.989547] oslo_messaging_rabbit.rabbit_retry_backoff = 2
[ 494.989711] oslo_messaging_rabbit.rabbit_retry_interval = 1
[ 494.989874] oslo_messaging_rabbit.rabbit_stream_fanout = False
[ 494.990049] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800
[ 494.990219] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False
[ 494.990388] oslo_messaging_rabbit.rpc_conn_pool_size = 30
[ 494.990558] oslo_messaging_rabbit.ssl = False
[ 494.990734] oslo_messaging_rabbit.ssl_ca_file =
[ 494.990907] oslo_messaging_rabbit.ssl_cert_file =
[ 494.991093] oslo_messaging_rabbit.ssl_enforce_fips_mode = False
[ 494.991272] oslo_messaging_rabbit.ssl_key_file =
[ 494.991445] oslo_messaging_rabbit.ssl_version =
[ 494.991610] oslo_messaging_rabbit.use_queue_manager = False
[ 494.991799] oslo_messaging_notifications.driver = ['messagingv2']
[ 494.991967] oslo_messaging_notifications.retry = -1
[ 494.992163] oslo_messaging_notifications.topics = ['notifications']
[ 494.992342] oslo_messaging_notifications.transport_url = ****
[ 494.992514] oslo_limit.auth_section = None
[ 494.992678] oslo_limit.auth_type = None
[ 494.992838] oslo_limit.cafile = None
[ 494.992998] oslo_limit.certfile = None
[ 494.993176] oslo_limit.collect_timing = False
[ 494.993335] oslo_limit.connect_retries = None
[ 494.993493] oslo_limit.connect_retry_delay = None
[ 494.993649] oslo_limit.endpoint_id = None
[ 494.993806] oslo_limit.endpoint_override = None
[ 494.993968] oslo_limit.insecure = False
[ 494.994141] oslo_limit.keyfile = None
[ 494.994305] oslo_limit.max_version = None
[ 494.994460] oslo_limit.min_version = None
[ 494.994618] oslo_limit.region_name = None
[ 494.994773] oslo_limit.retriable_status_codes = None
[ 494.994929] oslo_limit.service_name = None
[ 494.995100] oslo_limit.service_type = None
[ 494.995263] oslo_limit.split_loggers = False
[ 494.995419] oslo_limit.status_code_retries = None
[ 494.995574] oslo_limit.status_code_retry_delay = None
[ 494.995728] oslo_limit.timeout = None
[ 494.995883] oslo_limit.valid_interfaces = None
[ 494.996066] oslo_limit.version = None
[ 494.996241] oslo_reports.file_event_handler = None
[ 494.996405] oslo_reports.file_event_handler_interval = 1
[ 494.996563] oslo_reports.log_dir = None
[ 494.996732] vif_plug_linux_bridge_privileged.capabilities = [12]
[ 494.996889] vif_plug_linux_bridge_privileged.group = None
[ 494.997071] vif_plug_linux_bridge_privileged.helper_command = None
[ 494.997255] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon
[ 494.997420] vif_plug_linux_bridge_privileged.thread_pool_size = 8
[ 494.997577] vif_plug_linux_bridge_privileged.user = None
[ 494.997747] vif_plug_ovs_privileged.capabilities = [12, 1]
[ 494.997905] vif_plug_ovs_privileged.group = None
[ 494.998076] vif_plug_ovs_privileged.helper_command = None
[ 494.998252] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon
[ 494.998685] vif_plug_ovs_privileged.thread_pool_size = 8
[ 494.998685] vif_plug_ovs_privileged.user = None
[ 494.998790] os_vif_linux_bridge.flat_interface = None
[ 494.998895] os_vif_linux_bridge.forward_bridge_interface = ['all']
[ 494.999083] os_vif_linux_bridge.iptables_bottom_regex =
[ 494.999261] os_vif_linux_bridge.iptables_drop_action = DROP
[ 494.999431] os_vif_linux_bridge.iptables_top_regex =
[ 494.999596] os_vif_linux_bridge.network_device_mtu = 1500
[ 494.999760] os_vif_linux_bridge.use_ipv6 = False
[ 494.999924] os_vif_linux_bridge.vlan_interface = None
[ 495.000117] os_vif_ovs.default_qos_type = linux-noop
[ 495.000292] os_vif_ovs.isolate_vif = False
[ 495.000460] os_vif_ovs.network_device_mtu = 1500
[ 495.000624] os_vif_ovs.ovs_vsctl_timeout = 120
[ 495.000793] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640
[ 495.000961] os_vif_ovs.ovsdb_interface = native
[ 495.001138] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] os_vif_ovs.per_port_bridge = False {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.001304] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] os_brick.lock_path = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.001468] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.001632] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.001801] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] privsep_osbrick.capabilities = [21] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.001960] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] privsep_osbrick.group = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.002134] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] privsep_osbrick.helper_command = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.002301] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.002464] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.002622] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] privsep_osbrick.user = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.002795] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.002953] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] nova_sys_admin.group = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.003138] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] nova_sys_admin.helper_command = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.003308] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
495.003473] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.003630] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] nova_sys_admin.user = None {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 495.003762] env[61440]: DEBUG oslo_service.service [None req-2f2eb893-85d0-4e83-b534-6a86a4c22a36 None None] ******************************************************************************** {{(pid=61440) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 495.004216] env[61440]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 495.014531] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Getting list of instances from cluster (obj){ [ 495.014531] env[61440]: value = "domain-c8" [ 495.014531] env[61440]: _type = "ClusterComputeResource" [ 495.014531] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 495.015790] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af75c1a2-e411-4e41-8be7-8d60276e39e7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.025087] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Got total of 0 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 495.025630] env[61440]: WARNING nova.virt.vmwareapi.driver [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 495.026117] env[61440]: INFO nova.virt.node [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Generated node identity 9fb487e1-32f6-4c78-bc1f-37162b31d3aa [ 495.026347] env[61440]: INFO nova.virt.node [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Wrote node identity 9fb487e1-32f6-4c78-bc1f-37162b31d3aa to /opt/stack/data/n-cpu-1/compute_id [ 495.039109] env[61440]: WARNING nova.compute.manager [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Compute nodes ['9fb487e1-32f6-4c78-bc1f-37162b31d3aa'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 495.075048] env[61440]: INFO nova.compute.manager [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 495.096355] env[61440]: WARNING nova.compute.manager [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
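The "Acquiring lock / acquired / released" DEBUG records that follow are emitted by oslo.concurrency's lockutils wrapper around Nova's critical sections. A minimal sketch of the pattern that produces this trio of records (illustrative only, not Nova's actual source; the lock name is copied from the log):

    # Sketch, not Nova code: oslo.concurrency logs "Acquiring lock ...",
    # "Lock ... acquired :: waited Ns" and "Lock ... released :: held Ns"
    # at DEBUG around any section wrapped like this.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def clean_compute_node_cache():
        # Body runs with the "compute_resources" lock held; the wrapper
        # records how long the caller waited and how long it held the lock.
        pass

    clean_compute_node_cache()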
[ 495.096820] env[61440]: DEBUG oslo_concurrency.lockutils [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 495.097142] env[61440]: DEBUG oslo_concurrency.lockutils [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 495.097316] env[61440]: DEBUG oslo_concurrency.lockutils [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 495.097477] env[61440]: DEBUG nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 495.098539] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0274d6-df03-4bf8-98fe-75bf2f75bbe5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.107084] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac12cd6-51a3-44ec-92e3-642598eb62f4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.120740] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ba9242-6012-4197-93b7-55dacb2a7f71 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.126844] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5286b62-ec87-4be3-94ea-f70f1bae1c2b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.156144] env[61440]: DEBUG nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180681MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 495.156301] env[61440]: DEBUG oslo_concurrency.lockutils [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 495.156505] env[61440]: DEBUG oslo_concurrency.lockutils [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 495.168592] env[61440]: WARNING nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] No compute node record for cpu-1:9fb487e1-32f6-4c78-bc1f-37162b31d3aa: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 9fb487e1-32f6-4c78-bc1f-37162b31d3aa could not be found.
[ 495.184253] env[61440]: INFO nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa
[ 495.242045] env[61440]: DEBUG nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 495.242045] env[61440]: DEBUG nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=183GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 495.347996] env[61440]: INFO nova.scheduler.client.report [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] [req-6000d3f4-5f57-4bf2-8f50-0b3d084d9a7f] Created resource provider record via placement API for resource provider with UUID 9fb487e1-32f6-4c78-bc1f-37162b31d3aa and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
[ 495.363283] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a946fe78-d785-4f26-abc9-2a9d0a102151 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.370925] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c1d341-b339-4117-b7a7-21aa3560f65b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.399645] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984b5298-0544-4d9f-ae89-b1bfbde518e3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.406648] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f445a7cc-d51d-4005-b66f-81011b292cf2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 495.419893] env[61440]: DEBUG nova.compute.provider_tree [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 495.462019] env[61440]: DEBUG nova.scheduler.client.report [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Updated inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}}
[ 495.462019] env[61440]: DEBUG nova.compute.provider_tree [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Updating resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa generation from 0 to 1 during operation: update_inventory {{(pid=61440) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 495.462019] env[61440]: DEBUG nova.compute.provider_tree [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 495.526940] env[61440]: DEBUG nova.compute.provider_tree [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Updating resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa generation from 1 to 2 during operation: update_traits {{(pid=61440) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 495.543251] env[61440]: DEBUG nova.compute.resource_tracker [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 495.544029] env[61440]: DEBUG oslo_concurrency.lockutils [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.387s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 495.544029] env[61440]: DEBUG nova.service [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Creating RPC server for service compute {{(pid=61440) start /opt/stack/nova/nova/service.py:182}}
[ 495.557562] env[61440]: DEBUG nova.service [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] Join ServiceGroup membership for this service compute {{(pid=61440) start /opt/stack/nova/nova/service.py:199}}
[ 495.557969] env[61440]: DEBUG nova.servicegroup.drivers.db [None req-590b9ba0-8059-4931-9ca1-3be949e6f841 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61440) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 504.841906] env[61440]: DEBUG dbcounter [-] [61440] Writing DB stats nova_cell1:SELECT=1 {{(pid=61440) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 504.843397] env[61440]: DEBUG dbcounter [-] [61440] Writing DB stats nova_cell0:SELECT=1 {{(pid=61440) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 537.748243] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquiring lock "33c87cb0-cd99-4c35-bcfa-899256be0460" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 537.748243] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Lock "33c87cb0-cd99-4c35-bcfa-899256be0460" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 537.772142] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 537.890485] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 537.890744] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 537.892346] env[61440]: INFO nova.compute.claims [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 538.025070] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b44b39b-c457-4b05-930a-9ffe124e015b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.033109] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed9a237-d1cf-4eca-91d5-398583f1ccba {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.064346] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca93b4ff-7706-4ac0-9c9c-5108dee4ba47 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.072046] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16c034e-eb81-4b75-ab11-e5098ad8d24b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.088152] env[61440]: DEBUG nova.compute.provider_tree [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 538.099130] env[61440]: DEBUG nova.scheduler.client.report [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 538.125299] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.234s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 538.125871] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 538.195204] env[61440]: DEBUG nova.compute.utils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 538.197624] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 538.198530] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 538.227867] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 538.325263] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 539.235830] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquiring lock "85c2cfe1-443a-4373-bdba-b2a957a8681b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 539.236915] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Lock "85c2cfe1-443a-4373-bdba-b2a957a8681b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 539.255594] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 539.390772] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 539.391098] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 539.392943] env[61440]: INFO nova.compute.claims [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 539.557910] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e526781d-cfc6-4b6e-8dec-bdac7fa8c3a4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.570903] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bb072a-6d06-4913-a6c7-b22444b98a3f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.605378] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5fc417-a17b-4d7a-805f-5f0d0dbeaaf4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.614152] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb15310-0390-4271-8009-96ce7c83d93d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.634662] env[61440]: DEBUG nova.compute.provider_tree [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 539.646741] env[61440]: DEBUG nova.scheduler.client.report [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 539.680100] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 539.680823] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 539.720311] env[61440]: DEBUG nova.compute.utils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 539.723501] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 539.724049] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 539.738153] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 539.759215] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 539.759215] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 539.763557] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 539.763557] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 539.763705] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 539.763861] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 539.764102] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 539.765183] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 539.765183] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 539.765183] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 539.765183] env[61440]: DEBUG nova.virt.hardware [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 539.766228] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b85d63-3757-4710-9a15-c1e33bb5c408 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.788359] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ede6d0-a1a0-4bb4-b83b-ac90f8906b53 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.814652] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebe2c70-515c-4b63-af98-f38318f7b128 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.867018] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 539.911205] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 539.911370] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 539.911506] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 539.913289] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 539.913289] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 539.913289] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 539.913289] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 539.913486] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 539.913486] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 539.913486] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 539.913486] env[61440]: DEBUG nova.virt.hardware [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 539.914345] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a9459b-4ca9-4f17-9d17-07eb12e304d1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.922818] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd4c354-8560-4315-bd2b-cd720d1aa645 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.947201] env[61440]: DEBUG nova.policy [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '023a0da743db4ab7bac2e8fd560d6a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'befc54ff060b4540bfb4834aef2bf193', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}}
[ 539.973543] env[61440]: DEBUG nova.policy [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '287f266a3fdf45e4af60528511f75b80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4197f2cbdc5140a1bc08be023816e01e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}}
[ 540.795339] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Successfully created port: 73a9dec1-147f-4ac6-b372-bcaf623e1ce5 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 541.360632] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Successfully created port: d2a0357e-751c-4493-a5d7-b995727b244c {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 542.111868] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "ce0340f3-116c-4196-a5e1-ae1225f6c4b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 542.113293] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Lock "ce0340f3-116c-4196-a5e1-ae1225f6c4b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 542.119987] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 542.120229] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 542.127443] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 542.158411] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 542.264321] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 542.264321] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 542.265521] env[61440]: INFO nova.compute.claims [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 542.272389] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 542.435373] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191a2e9d-8010-4881-8775-570598cf9d5f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.449747] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767f3ae0-2d9f-42c6-89e6-04a2ff08b5b8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.486833] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa94671-7582-48a5-b6c0-28379cea3ae2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.495300] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370c16ac-5d17-475e-be6b-c8245ff8e546 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.511871] env[61440]: DEBUG nova.compute.provider_tree [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 542.528180] env[61440]: DEBUG nova.scheduler.client.report [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 542.550768] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 542.551394] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 542.553897] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.282s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 542.557016] env[61440]: INFO nova.compute.claims [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 542.639317] env[61440]: DEBUG nova.compute.utils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 542.640685] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 542.641163] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 542.657563] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 542.769772] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6234fc49-690d-4463-8191-ae969fd005af {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.776144] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9716699-9a34-4505-9724-8bfb9e4219d2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.783094] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 542.820310] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe25644c-88b5-4832-8d54-3984dcdc122f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.834670] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 542.834939] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 542.835121] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 542.835380] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 542.835479] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 542.835606] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 542.835852] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 542.835969] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 542.839225] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 542.839225] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 542.839225] env[61440]: DEBUG nova.virt.hardware [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 542.839225] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3625eea-95a0-459c-b921-6652ad6605d5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.847766] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad1b4a0-d0b2-419d-9754-2f57b2253907 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.867252] env[61440]: DEBUG nova.compute.provider_tree [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 542.882231] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ded06e0-6fb1-4c06-b6f1-a3db82775de3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.890062] env[61440]: DEBUG nova.scheduler.client.report [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Inventory has not changed for provider
9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 542.914125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.360s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.914824] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 542.977574] env[61440]: DEBUG nova.compute.utils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.978868] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Not allocating networking since 'none' was specified. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 542.994312] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Start building block device mappings for instance. 
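Each "Getting desirable topologies" walkthrough ends with the single candidate VirtCPUTopology(cores=1,sockets=1,threads=1) because the only factorization of one vCPU under the default 65536 socket/core/thread limits is 1:1:1. A hypothetical re-creation of just that enumeration step (the real logic in nova/virt/hardware.py also applies flavor/image preferences and sorting):

```python
# Hypothetical sketch of the factorization behind "Got 1 possible
# topologies"; not the actual nova/virt/hardware.py implementation.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product is vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log
```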
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 543.092520] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.092782] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.101998] env[61440]: DEBUG nova.policy [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc8609fb061d4431af1ae25dd1bc13c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a82168de312442f1bad306e6b13d639c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 543.128553] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 543.138109] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Start spawning the instance on the hypervisor. 
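The "Policy check for network:attach_external_network failed" line is expected rather than an error: the tempest credentials carry only the reader and member roles, so the admin-only check fails and Nova simply excludes external networks from the allocation, as the successful port creation later in the log confirms. A hedged oslo.policy sketch of the same kind of check (the 'is_admin:True' rule string is illustrative, not Nova's registered default):

```python
# Sketch of the authorize() call behind the "Policy check ... failed"
# lines. The rule string is illustrative; Nova registers its own defaults.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

creds = {'roles': ['reader', 'member'], 'is_admin': False}
allowed = enforcer.authorize('network:attach_external_network',
                             target={}, creds=creds, do_raise=False)
print(allowed)  # False -> external networks are skipped, not fatal
```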
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 543.232500] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.232874] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.232934] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.233307] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.233307] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.233431] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.234705] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.234705] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.234705] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec 
tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.234705] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.234705] env[61440]: DEBUG nova.virt.hardware [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.238379] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea182839-c261-4efd-91a3-71ea694adee0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.241865] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.242148] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.243641] env[61440]: INFO nova.compute.claims [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.253556] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97570fe-51b9-423e-bf2c-5ab5dcc4e91a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.272730] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Instance VIF info [] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 543.283994] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 543.284706] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66b790f3-e6f0-4526-8048-0603b3c90f39 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.300768] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Created folder: OpenStack in parent group-v4. [ 543.300965] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Creating folder: Project (8e7ded3b9eaf478185e66c69522fe87b). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 543.301225] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdffdb99-1766-4dba-825a-7457f8e4e075 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.310584] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Created folder: Project (8e7ded3b9eaf478185e66c69522fe87b) in parent group-v843372. [ 543.310763] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Creating folder: Instances. Parent ref: group-v843373. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 543.311026] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8050f598-de56-4520-b1ee-aa8ccf838a49 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.320589] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Created folder: Instances in parent group-v843373. [ 543.321070] env[61440]: DEBUG oslo.service.loopingcall [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 543.321070] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 543.321538] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a546c8b-f63a-4c72-80c0-fa4d38496369 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.345354] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 543.345354] env[61440]: value = "task-4281199" [ 543.345354] env[61440]: _type = "Task" [ 543.345354] env[61440]: } to complete. 
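Everything vCenter-side follows the invoke-then-poll pattern visible here: Folder.CreateVM_Task returns a task moref ("task-4281199") immediately, and oslo.vmware then polls it (the "progress is 0%" line that follows) until it reports success or error. A simplified loop in that spirit, where get_task_info is a hypothetical stand-in for the PropertyCollector read the real library performs:

```python
# Simplified sketch of oslo.vmware-style task polling; get_task_info is
# a hypothetical stand-in for the PropertyCollector round trip, and the
# real library drives this from a looping call rather than sleep().
import time

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)  # assumed: has .state/.progress/.error
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(f"task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress or 0}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete in {timeout}s")
```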
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.357934] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281199, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.424221] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "94b6a93d-de4d-4600-94af-81dce16b22f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.424481] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.446664] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 543.530405] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e2669a-cc84-4050-8654-77753be688cd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.537968] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b568450e-3684-4939-9e2c-636bbf8e9225 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.542955] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.571783] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1d3170-10c1-4d2f-8aab-c730fed82b0e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.579029] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e38d9b-ab96-474c-a2ac-2f07027d61f4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.595718] env[61440]: DEBUG nova.compute.provider_tree [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.609103] env[61440]: DEBUG nova.scheduler.client.report 
[None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.636472] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.394s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.637019] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 543.639688] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.097s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.642476] env[61440]: INFO nova.compute.claims [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.691027] env[61440]: DEBUG nova.compute.utils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.692706] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Allocating IP information in the background. 
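The waited/held bookkeeping around "compute_resources" is oslo.concurrency at work: every resource claim on this host serializes on one process-local lock, which is why req-9ab4211f waited 0.097s for req-e6c05c01 to release it. A minimal example of the same primitive (the lock name is the real one; the claim body is a placeholder):

```python
# Minimal oslo.concurrency example; the synchronized decorator emits the
# same "acquired ... waited" / "released ... held" DEBUG lines seen above.
import time
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    time.sleep(0.3)  # placeholder for testing and recording the claim
    return f"claim ok for {instance_uuid}"

print(instance_claim('f59b0b04-643c-497c-90a0-a7f885c1eb3b'))
```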
{{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 543.693668] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 543.710585] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 543.798740] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 543.835502] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.835746] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.835905] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.836108] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.836416] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 543.836416] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.836683] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.836847] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.837825] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.837825] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.837945] env[61440]: DEBUG nova.virt.hardware [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.838838] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9b8ef0-3d14-43e6-9f1c-b9506d831a95 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.844607] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829dda53-4b9e-4217-b6bf-e273b0a1cd50 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.857053] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcae6f2-dd7f-401e-b358-cfb15ec96f08 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.864210] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f58e38-4a53-4b0a-b6b8-6e39fa65107b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.872203] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281199, 'name': CreateVM_Task, 'duration_secs': 0.374978} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.905099] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 543.906410] env[61440]: DEBUG oslo_vmware.service [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fdde57-dd7c-4c6b-9c4b-d9a36b8f5a3c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.909897] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d025cf5-085e-45d0-9a38-da8df6903b86 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.916488] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.916488] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.917379] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 543.918577] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-303b6b22-68ce-43aa-94af-fd207d292e02 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.923014] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a54e17-2a6e-4134-bd79-3a4bf604a6e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.929128] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Waiting for the task: (returnval){ [ 543.929128] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5227bd80-3070-6e37-a608-d5a3460feeba" [ 543.929128] env[61440]: _type = "Task" [ 543.929128] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.940449] env[61440]: DEBUG nova.compute.provider_tree [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.946753] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.947012] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 543.947255] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.947410] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.948282] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 543.948282] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-627e7eb6-f834-485a-ae01-b4adbd7bff60 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.952026] env[61440]: DEBUG nova.scheduler.client.report [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.972239] env[61440]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 543.972590] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 543.973464] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b98a78e-f3dc-4ae4-94a7-3787f117ec9e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.982774] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e02a076c-0498-4f2c-8490-c9b6de391d24 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.988523] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Waiting for the task: (returnval){ [ 543.988523] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52c21c4e-03c7-bac0-b468-f1f5c8bfe33e" [ 543.988523] env[61440]: _type = "Task" [ 543.988523] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.997145] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52c21c4e-03c7-bac0-b468-f1f5c8bfe33e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.003146] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.363s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.004030] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 544.049853] env[61440]: DEBUG nova.compute.utils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.051860] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Allocating IP information in the background. 
{{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 544.055020] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.068488] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 544.171734] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 544.210898] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.211259] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.211394] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.211617] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.211953] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.212038] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 
tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.212309] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.212383] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.212579] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.212870] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.213451] env[61440]: DEBUG nova.virt.hardware [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.214084] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97cd26e-f49c-43e0-a133-f48d7fa1d82d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.227714] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcbdf9d-0600-47c4-bb8e-b453579daf2a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.363299] env[61440]: DEBUG nova.policy [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f8d04fb246c4db2832764c51765331a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b71be25936c74457ab25a895906fbbc6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.488481] env[61440]: DEBUG nova.policy [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Policy check for network:attach_external_network failed with credentials 
{'is_admin': False, 'user_id': '2cfe4f9228be40aeb8812ed8e7580c15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '696cc589cef24864bb136b996daad710', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.500201] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 544.501777] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Creating directory with path [datastore2] vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 544.501777] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-472ed440-2724-4352-9cc6-8436b2a3e582 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.523655] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Created directory with path [datastore2] vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 544.523655] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Fetch image to [datastore2] vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 544.523655] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 544.524338] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f6651b-73ba-48c7-ab77-335ed8555d4f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.531765] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d590362a-5e6d-4e79-9645-f807d1c2fb7f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.542842] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e55806-4189-44b2-b904-d443bc800267 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.580329] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a20474-3d44-4523-9184-8e8767ee9598 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.586382] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-03ffdda8-dd9c-41f5-a4a2-b4ac1f2f7861 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.676083] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 544.710882] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Successfully updated port: 73a9dec1-147f-4ac6-b372-bcaf623e1ce5 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 544.732986] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquiring lock "refresh_cache-85c2cfe1-443a-4373-bdba-b2a957a8681b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.732986] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquired lock "refresh_cache-85c2cfe1-443a-4373-bdba-b2a957a8681b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.732986] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 544.744828] env[61440]: DEBUG oslo_vmware.rw_handles [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
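The rw_handles lines show the actual byte transfer: a plain HTTP PUT to the datastore's /folder endpoint (dcPath and dsName as query parameters) streaming the 21,318,656-byte sparse VMDK. A rough requests-based equivalent (the real handle authenticates with the generic service ticket acquired above and verifies the vCenter certificate; both are omitted here, so this is a sketch, not the oslo.vmware implementation):

```python
# Rough requests-based equivalent of the oslo.vmware write handle.
# Auth (generic service ticket) and TLS verification are omitted.
import requests

def upload_to_datastore(url, chunks):
    """Stream an iterable of byte chunks to a vSphere datastore URL."""
    resp = requests.put(url, data=chunks,
                        headers={'Content-Type': 'application/octet-stream'},
                        verify=False)
    resp.raise_for_status()
    return resp.status_code
```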
{{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 544.813430] env[61440]: DEBUG oslo_vmware.rw_handles [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 544.813607] env[61440]: DEBUG oslo_vmware.rw_handles [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 544.881848] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Successfully updated port: d2a0357e-751c-4493-a5d7-b995727b244c {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 544.890257] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquiring lock "refresh_cache-33c87cb0-cd99-4c35-bcfa-899256be0460" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.890405] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquired lock "refresh_cache-33c87cb0-cd99-4c35-bcfa-899256be0460" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.890558] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 544.915568] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.224491] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
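The rw_handles entries above are the tail of the image transfer: Nova streams the 21318656-byte Glance image iterator into an HTTP write handle opened against the ESX host's /folder datastore endpoint, then closes the handle once the iterator is exhausted. A rough sketch of that upload, assuming the generic service ticket obtained via SessionManager.AcquireGenericServiceTicket authorizes the request; the requests-based transport and the cookie name here are illustrative stand-ins, not the actual internals of oslo_vmware.rw_handles.FileWriteHandle:

import requests

def upload_to_datastore(image_iter, host, ds_path, ds_name, ticket, size):
    # PUT the image bytes to the datastore HTTP endpoint, e.g.
    # https://<esx>/folder/vmware_temp/.../tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2
    url = ("https://%s:443/folder/%s?dcPath=ha-datacenter&dsName=%s"
           % (host, ds_path, ds_name))
    headers = {"Content-Length": str(size),
               "Content-Type": "application/octet-stream"}
    # Assumed ticket cookie; the real handle wires the service ticket
    # into the connection itself.
    cookies = {"vmware_cgi_ticket": ticket}
    # verify=False mirrors a lab setup with self-signed certs.
    resp = requests.put(url, data=image_iter, headers=headers,
                        cookies=cookies, verify=False)
    resp.raise_for_status()  # "Closing write handle" corresponds to teardown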
[ 545.384641] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Successfully created port: fc9f18b3-bc3b-47ef-a946-75c3513758fb {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.559462] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_power_states {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.592235] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Getting list of instances from cluster (obj){ [ 545.592235] env[61440]: value = "domain-c8" [ 545.592235] env[61440]: _type = "ClusterComputeResource" [ 545.592235] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 545.594788] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ae143e-364d-4245-a34f-17ef1953f8f6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.609120] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Got total of 1 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 545.609363] env[61440]: WARNING nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] While synchronizing instance power states, found 6 instances in the database and 1 instances on the hypervisor.
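This warning is expected mid-run: _sync_power_states compares what the Nova database thinks exists (six instances, five of them still building) against what the cluster actually reports (one VM created so far), then queues a per-UUID sync guarded by an instance lock so it cannot race the in-flight spawns. A minimal sketch of that reconciliation shape, with simplified helper names rather than the actual nova/compute/manager.py code:

import threading

_locks = {}
_guard = threading.Lock()

def _instance_lock(uuid):
    # One lock per UUID, mirroring the "Acquiring lock <uuid>" lines below.
    with _guard:
        return _locks.setdefault(uuid, threading.Lock())

def sync_power_states(db_instances, driver_instances):
    # db_instances / driver_instances: {uuid: power_state}
    if len(db_instances) != len(driver_instances):
        print("WARNING: found %d instances in the database and %d on the "
              "hypervisor" % (len(db_instances), len(driver_instances)))
    for uuid, db_state in db_instances.items():
        print("Triggering sync for uuid %s" % uuid)
        with _instance_lock(uuid):
            driver_state = driver_instances.get(uuid)
            if driver_state is not None and driver_state != db_state:
                pass  # the real task calls _sync_instance_power_state here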
[ 545.609584] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 33c87cb0-cd99-4c35-bcfa-899256be0460 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 545.609830] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 85c2cfe1-443a-4373-bdba-b2a957a8681b {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 545.610053] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 545.610267] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid ce0340f3-116c-4196-a5e1-ae1225f6c4b3 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 545.610475] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid f59b0b04-643c-497c-90a0-a7f885c1eb3b {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 545.610678] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 94b6a93d-de4d-4600-94af-81dce16b22f7 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 545.614616] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "33c87cb0-cd99-4c35-bcfa-899256be0460" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.615068] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "85c2cfe1-443a-4373-bdba-b2a957a8681b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.615410] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.615673] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "ce0340f3-116c-4196-a5e1-ae1225f6c4b3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.617418] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.617418] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock 
"94b6a93d-de4d-4600-94af-81dce16b22f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.617418] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.617418] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Getting list of instances from cluster (obj){ [ 545.617418] env[61440]: value = "domain-c8" [ 545.617418] env[61440]: _type = "ClusterComputeResource" [ 545.617418] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 545.618320] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f8d560-8cc3-42f3-ae85-9c5fad935515 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.629151] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Got total of 1 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 546.038190] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Updating instance_info_cache with network_info: [{"id": "d2a0357e-751c-4493-a5d7-b995727b244c", "address": "fa:16:3e:91:79:42", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0357e-75", "ovs_interfaceid": "d2a0357e-751c-4493-a5d7-b995727b244c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.061946] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Releasing lock "refresh_cache-33c87cb0-cd99-4c35-bcfa-899256be0460" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.062477] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 
33c87cb0-cd99-4c35-bcfa-899256be0460] Instance network_info: |[{"id": "d2a0357e-751c-4493-a5d7-b995727b244c", "address": "fa:16:3e:91:79:42", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0357e-75", "ovs_interfaceid": "d2a0357e-751c-4493-a5d7-b995727b244c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 546.063198] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:79:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2a0357e-751c-4493-a5d7-b995727b244c', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 546.074103] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Creating folder: Project (4197f2cbdc5140a1bc08be023816e01e). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.074691] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b12a10e-b0ff-4592-9538-c612d7ea1e06 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.087090] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Created folder: Project (4197f2cbdc5140a1bc08be023816e01e) in parent group-v843372. [ 546.087339] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Creating folder: Instances. Parent ref: group-v843376. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.087865] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0bc8340b-7748-4ac2-a46a-d0416b1936ee {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.098078] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Created folder: Instances in parent group-v843376. [ 546.098334] env[61440]: DEBUG oslo.service.loopingcall [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 546.098523] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 546.098750] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-868df52f-a4ca-4150-8562-8e50fa337f26 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.127824] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 546.127824] env[61440]: value = "task-4281202" [ 546.127824] env[61440]: _type = "Task" [ 546.127824] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.146860] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281202, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.172830] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.173661] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.191157] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.294275] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.294979] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.303022] env[61440]: INFO nova.compute.claims [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.373248] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Updating instance_info_cache with network_info: [{"id": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5", "address": "fa:16:3e:fe:d5:34", "network": {"id": "8e424544-f49e-4896-830b-5e146db4db6c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1101715711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "befc54ff060b4540bfb4834aef2bf193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9dec1-14", "ovs_interfaceid": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.399204] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Releasing lock "refresh_cache-85c2cfe1-443a-4373-bdba-b2a957a8681b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.399917] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 
85c2cfe1-443a-4373-bdba-b2a957a8681b] Instance network_info: |[{"id": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5", "address": "fa:16:3e:fe:d5:34", "network": {"id": "8e424544-f49e-4896-830b-5e146db4db6c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1101715711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "befc54ff060b4540bfb4834aef2bf193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9dec1-14", "ovs_interfaceid": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 546.400217] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:d5:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e272539-d425-489f-9a63-aba692e88933', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73a9dec1-147f-4ac6-b372-bcaf623e1ce5', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 546.413102] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Creating folder: Project (befc54ff060b4540bfb4834aef2bf193). Parent ref: group-v843372. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.415276] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b783267f-cec2-44b3-95a9-c9112f20bc7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.420872] env[61440]: DEBUG nova.compute.manager [req-a82d364d-f804-43b4-848e-b1783ba8c4fc req-5d775de7-5a93-4e8c-8837-02c0fc97534a service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Received event network-vif-plugged-73a9dec1-147f-4ac6-b372-bcaf623e1ce5 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 546.421104] env[61440]: DEBUG oslo_concurrency.lockutils [req-a82d364d-f804-43b4-848e-b1783ba8c4fc req-5d775de7-5a93-4e8c-8837-02c0fc97534a service nova] Acquiring lock "85c2cfe1-443a-4373-bdba-b2a957a8681b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.421302] env[61440]: DEBUG oslo_concurrency.lockutils [req-a82d364d-f804-43b4-848e-b1783ba8c4fc req-5d775de7-5a93-4e8c-8837-02c0fc97534a service nova] Lock "85c2cfe1-443a-4373-bdba-b2a957a8681b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.421871] env[61440]: DEBUG oslo_concurrency.lockutils [req-a82d364d-f804-43b4-848e-b1783ba8c4fc req-5d775de7-5a93-4e8c-8837-02c0fc97534a service nova] Lock "85c2cfe1-443a-4373-bdba-b2a957a8681b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.421871] env[61440]: DEBUG nova.compute.manager [req-a82d364d-f804-43b4-848e-b1783ba8c4fc req-5d775de7-5a93-4e8c-8837-02c0fc97534a service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] No waiting events found dispatching network-vif-plugged-73a9dec1-147f-4ac6-b372-bcaf623e1ce5 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 546.421871] env[61440]: WARNING nova.compute.manager [req-a82d364d-f804-43b4-848e-b1783ba8c4fc req-5d775de7-5a93-4e8c-8837-02c0fc97534a service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Received unexpected event network-vif-plugged-73a9dec1-147f-4ac6-b372-bcaf623e1ce5 for instance with vm_state building and task_state spawning. [ 546.441584] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Created folder: Project (befc54ff060b4540bfb4834aef2bf193) in parent group-v843372. [ 546.441584] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Creating folder: Instances. Parent ref: group-v843379. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
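Each spawn builds a per-tenant folder chain in vCenter before CreateVM_Task, as in the entries above: a "Project (<project-id>)" folder under the OpenStack root folder (group-v843372 here) and an "Instances" folder beneath it, with later spawns for the same project reusing the existing folders. A pyVmomi-style sketch of walking or creating such a chain; it assumes a live vSphere connection, and the real Nova code invokes Folder.CreateFolder through oslo.vmware and handles the DuplicateName fault instead of pre-checking:

def ensure_folder_path(root_folder, names):
    # Walk or create e.g.
    #   Project (befc54ff060b4540bfb4834aef2bf193) / Instances
    # under the root folder.
    parent = root_folder
    for name in names:
        child = next((f for f in parent.childEntity
                      if getattr(f, "name", None) == name), None)
        if child is None:
            child = parent.CreateFolder(name)   # vSphere Folder.CreateFolder
        parent = child
    return parent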
[ 546.441584] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fe2799d-9603-43d9-a057-20e123a91655 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.453225] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Created folder: Instances in parent group-v843379. [ 546.454804] env[61440]: DEBUG oslo.service.loopingcall [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 546.454804] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 546.454804] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f88ec2dd-a9ca-4f08-9a7e-65650e269b9b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.483129] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 546.483129] env[61440]: value = "task-4281205" [ 546.483129] env[61440]: _type = "Task" [ 546.483129] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.498170] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281205, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
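CreateVM_Task is asynchronous on the vCenter side: Folder.CreateVM_Task returns a task reference immediately, and oslo.vmware's wait_for_task polls the task's info property until it reaches a terminal state, which is why each task logs "progress is 0%" and later "completed successfully ... duration_secs". The polling loop is roughly the following shape; poll() here is a stand-in for reading TaskInfo via the PropertyCollector, and the real loop in oslo_vmware/api.py runs on a loopingcall timer rather than a bare sleep:

import time
from collections import namedtuple

TaskInfo = namedtuple("TaskInfo", "state progress result error")

def wait_for_task(poll, task_id, interval=0.5):
    while True:
        info = poll()  # fetch the current TaskInfo for the task moref
        if info.state in ("queued", "running"):
            print("Task: {'id': %s, 'name': CreateVM_Task} progress is %s%%."
                  % (task_id, info.progress or 0))
        elif info.state == "success":
            return info.result
        else:  # "error"
            raise RuntimeError(info.error)
        time.sleep(interval)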
[ 546.611104] env[61440]: DEBUG nova.compute.manager [req-291f28bf-d68a-4604-ac62-4f900ff20576 req-994444ed-a2f6-4acb-905d-5ad586ac7abb service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Received event network-vif-plugged-d2a0357e-751c-4493-a5d7-b995727b244c {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 546.611391] env[61440]: DEBUG oslo_concurrency.lockutils [req-291f28bf-d68a-4604-ac62-4f900ff20576 req-994444ed-a2f6-4acb-905d-5ad586ac7abb service nova] Acquiring lock "33c87cb0-cd99-4c35-bcfa-899256be0460-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.611642] env[61440]: DEBUG oslo_concurrency.lockutils [req-291f28bf-d68a-4604-ac62-4f900ff20576 req-994444ed-a2f6-4acb-905d-5ad586ac7abb service nova] Lock "33c87cb0-cd99-4c35-bcfa-899256be0460-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.612014] env[61440]: DEBUG oslo_concurrency.lockutils [req-291f28bf-d68a-4604-ac62-4f900ff20576 req-994444ed-a2f6-4acb-905d-5ad586ac7abb service nova] Lock "33c87cb0-cd99-4c35-bcfa-899256be0460-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.615141] env[61440]: DEBUG nova.compute.manager [req-291f28bf-d68a-4604-ac62-4f900ff20576 req-994444ed-a2f6-4acb-905d-5ad586ac7abb service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] No waiting events found dispatching network-vif-plugged-d2a0357e-751c-4493-a5d7-b995727b244c {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 546.616037] env[61440]: WARNING nova.compute.manager [req-291f28bf-d68a-4604-ac62-4f900ff20576 req-994444ed-a2f6-4acb-905d-5ad586ac7abb service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Received unexpected event network-vif-plugged-d2a0357e-751c-4493-a5d7-b995727b244c for instance with vm_state building and task_state spawning. [ 546.617459] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f12d840-9944-4607-bc4e-3af2da61441b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.626318] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eba077-2ed8-464c-a95e-64641e8dfaaf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.678734] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281202, 'name': CreateVM_Task, 'duration_secs': 0.342626} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
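The two network-vif-plugged blocks above show Neutron notifying Nova (through the external-events API) that a port is plugged while its instance is still building; because the spawn path has not yet registered a waiter for the event, pop_instance_event finds nothing to dispatch and the event is logged as unexpected, which is harmless. A condensed sketch of that register/pop pattern, with a hypothetical class shape standing in for InstanceEvents in nova/compute/manager.py:

import threading

class InstanceEvents:
    def __init__(self):
        self._events = {}   # {instance_uuid: {event_name: threading.Event}}
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, name):
        # Called by the spawn path *before* the action that triggers the
        # event, so a waiter exists when Neutron fires it.
        ev = threading.Event()
        with self._lock:
            self._events.setdefault(instance_uuid, {})[name] = ev
        return ev

    def pop_instance_event(self, instance_uuid, name):
        # Called when network-vif-plugged-<port-id> arrives.
        with self._lock:
            ev = self._events.get(instance_uuid, {}).pop(name, None)
        if ev is None:
            print("WARNING: Received unexpected event %s for instance %s"
                  % (name, instance_uuid))
        else:
            ev.set()  # wake the waiter blocked in the spawn path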
[ 546.678734] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 546.679524] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0441d79e-7c55-48e5-9747-714f4d282b95 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.692019] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fbbd4f-aa09-4d23-adf5-429878bcb7f3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.709183] env[61440]: DEBUG nova.compute.provider_tree [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.725180] env[61440]: DEBUG nova.scheduler.client.report [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.748578] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.453s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.749446] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
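The inventory dict above is what the resource tracker reports to Placement for this cluster, and it is where the compute_resources claim that brackets this section gets its capacity numbers: usable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps what any single flavor may request. With the figures from the log:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                  'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 329,    'reserved': 0,   'max_unit': 180,
                  'allocation_ratio': 1.0},
}

def schedulable(inv):
    # Effective capacity that Placement allocates against.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 329.0}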
[ 546.765608] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.767336] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.767336] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 546.767336] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3cbd81-8f1c-42c5-8a86-d50b6c891ebc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.772785] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Waiting for the task: (returnval){ [ 546.772785] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e6ab4d-ccee-21f5-b0b1-9ab241f4124f" [ 546.772785] env[61440]: _type = "Task" [ 546.772785] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.787333] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e6ab4d-ccee-21f5-b0b1-9ab241f4124f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.800856] env[61440]: DEBUG nova.compute.utils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.803251] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
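The lock sequence above ("[datastore2] devstack-image-cache_base/<image-id>" plus an external semaphore) serializes concurrent spawns that use the same image: whoever holds the lock issues HostDatastoreBrowser.SearchDatastore_Task and either finds the cached VMDK and reuses it, or performs the one-time download into vmware_temp and promotes it into the cache, so the image fetched earlier in this log is only ever downloaded once per datastore. A schematic of that check-then-populate pattern, using in-process stand-ins for the oslo.concurrency locks and the datastore browser:

import threading

_cache_locks = {}
_guard = threading.Lock()
_cached = set()   # stand-in for what SearchDatastore_Task would find

def _lock_for(path):
    with _guard:
        return _cache_locks.setdefault(path, threading.Lock())

def fetch_image_if_missing(image_id, download):
    # download: callable that uploads the image, like the HTTP
    # write-handle transfer earlier in this log.
    cached = ("[datastore2] devstack-image-cache_base/%s/%s.vmdk"
              % (image_id, image_id))
    with _lock_for(cached):              # "Acquiring/Acquired lock ..."
        if cached in _cached:            # SearchDatastore_Task hit
            return cached                # reuse the cached copy
        tmp = "[datastore2] vmware_temp/%s/tmp-sparse.vmdk" % image_id
        download(tmp)                    # first spawn pays the download
        _cached.add(cached)              # move/convert tmp -> cache (elided)
        return cached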
[ 546.803251] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.823100] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.931522] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.971567] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.971795] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.971955] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.972166] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.972315] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.972577] env[61440]: DEBUG
nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.972714] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.972839] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.973114] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.973305] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.973484] env[61440]: DEBUG nova.virt.hardware [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.975434] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44decc58-97ed-4a01-89b1-c3110c852061 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.988716] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d6fc40-1c6c-487c-9934-25b5b3bf7d74 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.003389] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281205, 'name': CreateVM_Task, 'duration_secs': 0.339873} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
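The nova.virt.hardware lines above are the guest CPU topology negotiation: the flavor and image supply preferred and maximum sockets:cores:threads (all unset here, so the maxima default to 65536), Nova enumerates every factorization of the vCPU count that fits those limits, and the candidates are sorted by preference; with one vCPU the only possibility is sockets=1, cores=1, threads=1, exactly as logged. A toy version of the enumeration step, much simplified relative to nova/virt/hardware.py:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Every (sockets, cores, threads) triple whose product is vcpus
    # and which respects the limits, like _get_possible_cpu_topologies.
    return [(s, c, t)
            for s in range(1, min(vcpus, max_sockets) + 1)
            for c in range(1, min(vcpus, max_cores) + 1)
            for t in range(1, min(vcpus, max_threads) + 1)
            if s * c * t == vcpus]

print(possible_topologies(1))  # [(1, 1, 1)], matching the log
print(possible_topologies(4))  # [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), (2, 2, 1), (4, 1, 1)]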
[ 547.014248] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 547.015431] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.286515] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.287880] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 547.287880] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.288212] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.288487] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 547.288794] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0c39384-3e74-4912-b06c-4b047072e2ae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.294497] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Waiting for the task: (returnval){ [ 547.294497] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520bf418-90cc-0330-bfd6-918245ebf929" [ 547.294497] env[61440]: _type
= "Task" [ 547.294497] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.309623] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520bf418-90cc-0330-bfd6-918245ebf929, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.426620] env[61440]: DEBUG nova.policy [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd18ae4c6eb9d46bda177d204cadbf354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '420abbf16e2f42ac8d14cfb1dc30481e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.635586] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Successfully created port: b2ea6e7a-f21c-470f-b595-c30946c116d9 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.810887] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.811186] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 547.811404] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.080986] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Successfully created port: 6ec8f613-d7d4-4d1f-a41a-de0db4864db5 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 548.420246] env[61440]: DEBUG nova.network.neutron 
[None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Successfully updated port: fc9f18b3-bc3b-47ef-a946-75c3513758fb {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 548.439051] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "refresh_cache-b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.439397] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquired lock "refresh_cache-b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.439397] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.610864] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.706123] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Updating instance_info_cache with network_info: [{"id": "fc9f18b3-bc3b-47ef-a946-75c3513758fb", "address": "fa:16:3e:b0:cd:cd", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc9f18b3-bc", "ovs_interfaceid": "fc9f18b3-bc3b-47ef-a946-75c3513758fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.721963] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Releasing 
lock "refresh_cache-b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.722294] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance network_info: |[{"id": "fc9f18b3-bc3b-47ef-a946-75c3513758fb", "address": "fa:16:3e:b0:cd:cd", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc9f18b3-bc", "ovs_interfaceid": "fc9f18b3-bc3b-47ef-a946-75c3513758fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 549.722713] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:cd:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc9f18b3-bc3b-47ef-a946-75c3513758fb', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.737874] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Creating folder: Project (a82168de312442f1bad306e6b13d639c). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.737874] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78254dba-85f3-440a-bf40-8b586bcadace {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.748989] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Created folder: Project (a82168de312442f1bad306e6b13d639c) in parent group-v843372. [ 549.749386] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Creating folder: Instances. Parent ref: group-v843382. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.749386] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3ea6d2e-7240-4de3-95c1-2db4a69f17b5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.760501] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Created folder: Instances in parent group-v843382. [ 549.760762] env[61440]: DEBUG oslo.service.loopingcall [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.760946] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 549.761170] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f207b41-dc26-46d6-afd0-d37b0cd84d11 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.781351] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 549.781351] env[61440]: value = "task-4281208" [ 549.781351] env[61440]: _type = "Task" [ 549.781351] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.792680] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281208, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.298468] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281208, 'name': CreateVM_Task, 'duration_secs': 0.324382} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.298898] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 550.300992] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.301177] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.301574] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 550.301804] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9bf0238-0ec1-479d-8e20-8c4b2023774b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.308523] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Waiting for the task: (returnval){ [ 550.308523] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a92592-e63c-4f67-d41b-b33502661af1" [ 550.308523] env[61440]: _type = "Task" [ 550.308523] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.318883] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a92592-e63c-4f67-d41b-b33502661af1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.434837] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.435204] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.478695] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 550.589817] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.590099] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.591603] env[61440]: INFO nova.compute.claims [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.659331] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Successfully created port: 4cca7b08-a0eb-41d2-a798-a5a6f437fc5d {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.829765] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.829765] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b
tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 550.829765] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.885383] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed03fed7-6ea9-4080-b9bd-662bdcad6637 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.896813] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a27f3a8-2425-4806-b9aa-bf38dd8ee7e1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.940221] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48853da-8030-4226-bd2a-f5559d42dc2c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.948936] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7340b984-be15-4503-ba34-146288ae9733 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.969678] env[61440]: DEBUG nova.compute.provider_tree [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.981741] env[61440]: DEBUG nova.scheduler.client.report [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.997842] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.408s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.998694] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c 
tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 551.064450] env[61440]: DEBUG nova.compute.utils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.071192] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 551.071192] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 551.087724] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 551.242879] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 551.285504] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.285806] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.285894] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.286269] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.286269] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.286361] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.286599] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.286770] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.287078] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.287991] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.288689] env[61440]: DEBUG nova.virt.hardware [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.289704] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd713152-a85e-4fcc-81c9-6a232a042167 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.306596] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6d2b51-503c-423a-89cc-163dc025d3c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.315605] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.317112] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.317112] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 551.317112] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 551.344182] env[61440]: DEBUG nova.compute.manager [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Received event network-changed-73a9dec1-147f-4ac6-b372-bcaf623e1ce5 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 551.344182] env[61440]: DEBUG nova.compute.manager [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Refreshing instance network info cache due to event network-changed-73a9dec1-147f-4ac6-b372-bcaf623e1ce5. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 551.344182] env[61440]: DEBUG oslo_concurrency.lockutils [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] Acquiring lock "refresh_cache-85c2cfe1-443a-4373-bdba-b2a957a8681b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.344182] env[61440]: DEBUG oslo_concurrency.lockutils [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] Acquired lock "refresh_cache-85c2cfe1-443a-4373-bdba-b2a957a8681b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.344338] env[61440]: DEBUG nova.network.neutron [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Refreshing network info cache for port 73a9dec1-147f-4ac6-b372-bcaf623e1ce5 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 551.359387] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.359967] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.360217] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.360388] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.360552] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.360709] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.360867] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.361035] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 551.361218] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 551.362342] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.366314] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.367161] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.367702] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.368024] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.368156] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.368339] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 551.368504] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.391065] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.391917] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.391917] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.391917] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 551.393325] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ace33c3-717e-4843-bec9-54fac6d66c36 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.404640] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403afe97-b285-4519-939a-59898e1e768b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.422746] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a8fec6-eb36-4cd9-a635-f629ddbd63eb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.429741] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf8dc23-8bac-45f5-a26a-f01163c23d57 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.471564] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180696MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 551.471801] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.472278] 
env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.562169] env[61440]: DEBUG nova.policy [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74f5f7a4ba524c4890868c2c5bb40b5f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ecf629d533c4dfcacd4a3dab3729e9e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 551.584254] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 33c87cb0-cd99-4c35-bcfa-899256be0460 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584254] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 85c2cfe1-443a-4373-bdba-b2a957a8681b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584254] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584254] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ce0340f3-116c-4196-a5e1-ae1225f6c4b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584521] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f59b0b04-643c-497c-90a0-a7f885c1eb3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584521] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 94b6a93d-de4d-4600-94af-81dce16b22f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584521] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584521] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 551.584634] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 551.584634] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=183GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 551.687602] env[61440]: DEBUG nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Received event network-changed-d2a0357e-751c-4493-a5d7-b995727b244c {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 551.688039] env[61440]: DEBUG nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Refreshing instance network info cache due to event network-changed-d2a0357e-751c-4493-a5d7-b995727b244c. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 551.688658] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Acquiring lock "refresh_cache-33c87cb0-cd99-4c35-bcfa-899256be0460" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.688959] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Acquired lock "refresh_cache-33c87cb0-cd99-4c35-bcfa-899256be0460" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.689400] env[61440]: DEBUG nova.network.neutron [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Refreshing network info cache for port d2a0357e-751c-4493-a5d7-b995727b244c {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 551.777200] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c780ca25-d337-4fb3-ab0e-d6e46750c8a4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.788765] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb37e86-72ab-478c-94f1-17a809c96deb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.822353] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f758fb1-1fdf-43db-a31d-7e6031a4318b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.830209] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9b6e6c-b2de-4a78-b611-b85c9d98696e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.845453] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.862017] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.887043] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 551.887043] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.411s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.691614] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Successfully updated port: b2ea6e7a-f21c-470f-b595-c30946c116d9 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.711699] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "refresh_cache-94b6a93d-de4d-4600-94af-81dce16b22f7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.712730] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquired lock "refresh_cache-94b6a93d-de4d-4600-94af-81dce16b22f7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.713102] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.988166] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.266298] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Successfully updated port: 6ec8f613-d7d4-4d1f-a41a-de0db4864db5 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 553.282063] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "refresh_cache-f59b0b04-643c-497c-90a0-a7f885c1eb3b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.282179] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquired lock "refresh_cache-f59b0b04-643c-497c-90a0-a7f885c1eb3b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.282308] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.411341] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.411636] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.449668] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 553.500568] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance cache missing network info.
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.535254] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.535890] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.538912] env[61440]: INFO nova.compute.claims [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.748893] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c389ceb7-76e6-4dcb-a2fa-0c1d3dc5aca2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.757138] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc6725d-e861-4936-92d7-fc6a10451476 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.801281] env[61440]: DEBUG nova.network.neutron [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Updated VIF entry in instance network info cache for port d2a0357e-751c-4493-a5d7-b995727b244c. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 553.802010] env[61440]: DEBUG nova.network.neutron [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Updating instance_info_cache with network_info: [{"id": "d2a0357e-751c-4493-a5d7-b995727b244c", "address": "fa:16:3e:91:79:42", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0357e-75", "ovs_interfaceid": "d2a0357e-751c-4493-a5d7-b995727b244c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.810335] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84451743-743e-4296-864b-fbb6c9df9c2c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.821035] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5939218c-0290-40f9-9f3a-d163f011b6fa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.825552] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Releasing lock "refresh_cache-33c87cb0-cd99-4c35-bcfa-899256be0460" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.826406] env[61440]: DEBUG nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Received event network-vif-plugged-fc9f18b3-bc3b-47ef-a946-75c3513758fb {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 553.826406] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Acquiring lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.826406] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.826550] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.826740] env[61440]: DEBUG nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] No waiting events found dispatching network-vif-plugged-fc9f18b3-bc3b-47ef-a946-75c3513758fb {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 553.826948] env[61440]: WARNING nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Received unexpected event network-vif-plugged-fc9f18b3-bc3b-47ef-a946-75c3513758fb for instance with vm_state building and task_state spawning. [ 553.827167] env[61440]: DEBUG nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Received event network-changed-fc9f18b3-bc3b-47ef-a946-75c3513758fb {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 553.827389] env[61440]: DEBUG nova.compute.manager [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Refreshing instance network info cache due to event network-changed-fc9f18b3-bc3b-47ef-a946-75c3513758fb.
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 553.827638] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Acquiring lock "refresh_cache-b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.827821] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Acquired lock "refresh_cache-b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.828027] env[61440]: DEBUG nova.network.neutron [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Refreshing network info cache for port fc9f18b3-bc3b-47ef-a946-75c3513758fb {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 553.840211] env[61440]: DEBUG nova.compute.provider_tree [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.851708] env[61440]: DEBUG nova.scheduler.client.report [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.870602] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.871116] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Start building networks asynchronously for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 553.915543] env[61440]: DEBUG nova.compute.utils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.917546] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 553.917722] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 553.932394] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 553.953848] env[61440]: DEBUG nova.network.neutron [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Updated VIF entry in instance network info cache for port 73a9dec1-147f-4ac6-b372-bcaf623e1ce5. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 553.953848] env[61440]: DEBUG nova.network.neutron [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Updating instance_info_cache with network_info: [{"id": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5", "address": "fa:16:3e:fe:d5:34", "network": {"id": "8e424544-f49e-4896-830b-5e146db4db6c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1101715711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "befc54ff060b4540bfb4834aef2bf193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9dec1-14", "ovs_interfaceid": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.971149] env[61440]: DEBUG oslo_concurrency.lockutils [req-bb7bd78d-0114-4773-bb8b-d53c4a834c7c req-b90374ed-c131-4f59-a86f-40f105430688 service nova] Releasing lock "refresh_cache-85c2cfe1-443a-4373-bdba-b2a957a8681b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.044314] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Start spawning the instance on the hypervisor. 
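
The instance_info_cache payloads above are plain JSON, so the fields that matter when reading them (port id, MAC, fixed IPs, MTU) are easy to pull out. A reader-side snippet over a trimmed copy of the blob above:

import json

cache_blob = '''[{"id": "73a9dec1-147f-4ac6-b372-bcaf623e1ce5",
                  "address": "fa:16:3e:fe:d5:34",
                  "network": {"label": "tempest-FloatingIPsAssociationNegativeTestJSON-1101715711-network",
                              "meta": {"mtu": 8950},
                              "subnets": [{"cidr": "192.168.128.0/28",
                                           "ips": [{"address": "192.168.128.6"}]}]}}]'''

for vif in json.loads(cache_blob):
    net = vif['network']
    fixed = [ip['address'] for sn in net['subnets'] for ip in sn['ips']]
    print(vif['id'], vif['address'], fixed, 'mtu', net['meta']['mtu'])
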
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 554.083337] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 554.083920] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 554.083920] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.083920] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 554.084043] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.084208] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 554.084319] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 554.084475] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 554.084677] 
env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 554.086234] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 554.086234] env[61440]: DEBUG nova.virt.hardware [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 554.086710] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39c2a87-f85d-4b18-b727-8fc52c90dec4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.097886] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee2519f-c4bb-4650-96b5-9fd44828c6c7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.298545] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Updating instance_info_cache with network_info: [{"id": "b2ea6e7a-f21c-470f-b595-c30946c116d9", "address": "fa:16:3e:0f:53:a0", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ea6e7a-f2", "ovs_interfaceid": "b2ea6e7a-f21c-470f-b595-c30946c116d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.321150] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Releasing lock "refresh_cache-94b6a93d-de4d-4600-94af-81dce16b22f7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.321579] env[61440]: DEBUG nova.compute.manager [None 
req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance network_info: |[{"id": "b2ea6e7a-f21c-470f-b595-c30946c116d9", "address": "fa:16:3e:0f:53:a0", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ea6e7a-f2", "ovs_interfaceid": "b2ea6e7a-f21c-470f-b595-c30946c116d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.322397] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:53:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2ea6e7a-f21c-470f-b595-c30946c116d9', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 554.332222] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Creating folder: Project (696cc589cef24864bb136b996daad710). Parent ref: group-v843372. 
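
The Folder.CreateFolder invocations above come from vm_util.create_folder, which keeps a "Project (<tenant id>)/Instances" hierarchy under the OpenStack root folder. Against a bare oslo.vmware session the same call is roughly one invoke_api; a sketch only, since session and parent_ref must come from a live vCenter connection:

def create_folder(session, parent_ref, name):
    # session: an oslo_vmware.api.VMwareAPISession; parent_ref: a Folder
    # managed-object reference such as the group-v843372 seen above.
    return session.invoke_api(session.vim, 'CreateFolder', parent_ref, name=name)

# instances_folder = create_folder(session, project_folder, 'Instances')
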
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.332222] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4e7bcd1-071f-4284-8f99-d8985b1ca8db {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.341577] env[61440]: DEBUG nova.policy [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58d29102bf5c44a592ba08c8fb319606', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2701e3de21f42a7a286b54ec498da89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 554.345164] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Created folder: Project (696cc589cef24864bb136b996daad710) in parent group-v843372. [ 554.345370] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Creating folder: Instances. Parent ref: group-v843385. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.345609] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be233fc7-63db-4834-9190-f124bebf7dd1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.362652] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Created folder: Instances in parent group-v843385. [ 554.363143] env[61440]: DEBUG oslo.service.loopingcall [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.363398] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 554.363653] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9876a10-19e8-42a2-a0f0-a172bc750964 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.386973] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 554.386973] env[61440]: value = "task-4281211" [ 554.386973] env[61440]: _type = "Task" [ 554.386973] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.394973] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281211, 'name': CreateVM_Task} progress is 0%. 
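
The "Waiting for the task ... progress is 0%" pairs above are oslo.vmware's task poller; on a live session the whole dance is session.wait_for_task(task_ref). Its essential loop is a fixed-interval poll over TaskInfo.state, sketched here against a fake task so the shape is runnable on its own:

import time

class FakeTask:
    """Yields TaskInfo-like (state, payload) tuples, ending in success."""
    def __init__(self, states):
        self._states = iter(states)
    def poll(self):
        return next(self._states)

def wait_for_task(poll, interval=0.0):
    while True:
        state, payload = poll()
        if state == 'success':
            return payload          # TaskInfo.result
        if state == 'error':
            raise RuntimeError(payload)
        time.sleep(interval)        # 'queued'/'running': keep polling

task = FakeTask([('running', '0%'), ('running', '42%'), ('success', 'vm-1234')])
print(wait_for_task(task.poll))     # -> vm-1234
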
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.763029] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Updating instance_info_cache with network_info: [{"id": "6ec8f613-d7d4-4d1f-a41a-de0db4864db5", "address": "fa:16:3e:1b:18:d4", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec8f613-d7", "ovs_interfaceid": "6ec8f613-d7d4-4d1f-a41a-de0db4864db5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.781017] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Releasing lock "refresh_cache-f59b0b04-643c-497c-90a0-a7f885c1eb3b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.781017] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance network_info: |[{"id": "6ec8f613-d7d4-4d1f-a41a-de0db4864db5", "address": "fa:16:3e:1b:18:d4", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec8f613-d7", "ovs_interfaceid": "6ec8f613-d7d4-4d1f-a41a-de0db4864db5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.781470] 
env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:18:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ec8f613-d7d4-4d1f-a41a-de0db4864db5', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 554.790720] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Creating folder: Project (b71be25936c74457ab25a895906fbbc6). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.791890] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba787e94-da89-4530-8565-586a27650793 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.808917] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Created folder: Project (b71be25936c74457ab25a895906fbbc6) in parent group-v843372. [ 554.808917] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Creating folder: Instances. Parent ref: group-v843388. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.812284] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a7c46d3-5546-4ff8-ad88-ad9f7dd00c01 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.824344] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Created folder: Instances in parent group-v843388. [ 554.824344] env[61440]: DEBUG oslo.service.loopingcall [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.824344] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 554.824344] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aad95175-46cc-4fa2-94cf-65d1cb099803 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.852432] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 554.852432] env[61440]: value = "task-4281214" [ 554.852432] env[61440]: _type = "Task" [ 554.852432] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.859595] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281214, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.896317] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281211, 'name': CreateVM_Task, 'duration_secs': 0.347613} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.896487] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.897231] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.897971] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.897971] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.898219] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2fd676-d748-4c2e-b98b-6b479abe9440 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.902754] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Waiting for the task: (returnval){ [ 554.902754] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e0163e-b8fb-ab25-5ac8-f340eeae8174" [ 554.902754] env[61440]: _type = "Task" [ 554.902754] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.911178] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e0163e-b8fb-ab25-5ac8-f340eeae8174, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.194268] env[61440]: DEBUG nova.network.neutron [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Updated VIF entry in instance network info cache for port fc9f18b3-bc3b-47ef-a946-75c3513758fb. 
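
The lock and "external semaphore" pairs around devstack-image-cache_base/<image id> above guard a fetch-if-missing image cache: each spawn checks whether the VMDK already sits on the datastore (the SearchDatastore_Task calls) and only one worker may populate it. The shape of that pattern with oslo.concurrency, with the datastore reduced to a local directory and fetch() a placeholder (external locks need oslo_concurrency's lock_path configured):

import os
from oslo_concurrency import lockutils

def ensure_cached(image_id, cache_dir, fetch):
    path = os.path.join(cache_dir, image_id, image_id + '.vmdk')
    with lockutils.lock('image-cache-%s' % image_id, external=True):
        if not os.path.exists(path):                 # re-check under the lock
            os.makedirs(os.path.dirname(path), exist_ok=True)
            fetch(path)                              # e.g. download from Glance
    return path
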
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 555.194672] env[61440]: DEBUG nova.network.neutron [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Updating instance_info_cache with network_info: [{"id": "fc9f18b3-bc3b-47ef-a946-75c3513758fb", "address": "fa:16:3e:b0:cd:cd", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc9f18b3-bc", "ovs_interfaceid": "fc9f18b3-bc3b-47ef-a946-75c3513758fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.217373] env[61440]: DEBUG oslo_concurrency.lockutils [req-f40b6f94-1007-48f8-ad9a-7b9ab8a9a9ac req-501121be-b54f-4f48-ae6c-362caf3a7b6d service nova] Releasing lock "refresh_cache-b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.330445] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Successfully created port: df0b7982-0b15-498e-975e-61b6bb6134a4 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.362405] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281214, 'name': CreateVM_Task, 'duration_secs': 0.345912} completed successfully. 
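
"Successfully created port" above is Nova's _create_port_minimal round trip to Neutron, issued through Nova's own Neutron client. Roughly the same request made directly with openstacksdk, for comparison (the cloud profile name is a placeholder; the ids are the ones from the log):

import openstack

conn = openstack.connect(cloud='devstack')
port = conn.network.create_port(
    network_id='89f2c4aa-c166-4ad4-af3c-44f02a3add5f',   # the 'shared' network above
    device_id='f21a02ec-4fa2-439c-aa56-570e175a8b5e',    # instance uuid
    device_owner='compute:nova')
print(port.id)
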
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.362522] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.363353] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.420211] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.420211] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.420211] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.420211] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.420344] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.420344] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8826173-a1d3-4c61-abc1-7971790fdd37 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.428332] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Waiting for the task: (returnval){ [ 555.428332] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52eaf293-1ca0-3d46-ef7d-c413fd8b951d" [ 555.428332] env[61440]: _type = "Task" [ 555.428332] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.436547] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52eaf293-1ca0-3d46-ef7d-c413fd8b951d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.532206] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Successfully updated port: 4cca7b08-a0eb-41d2-a798-a5a6f437fc5d {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.549641] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "refresh_cache-fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.549755] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquired lock "refresh_cache-fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.549853] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.688854] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance cache missing network info. 
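
The Acquiring/Acquired/Releasing "refresh_cache-<uuid>" triplets threaded through this log are lockutils serializing rebuilds of the per-instance network info cache, so a cache miss like the one above is repaired exactly once. The pattern in miniature (the dict stands in for the DB-backed cache):

from oslo_concurrency import lockutils

_nw_cache = {}

def refresh_network_cache(instance_uuid, fetch_nw_info):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        nw_info = fetch_nw_info(instance_uuid)   # query Neutron for ports
        _nw_cache[instance_uuid] = nw_info       # persist under the lock
    return nw_info
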
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.938991] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.938991] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.938991] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.423286] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Updating instance_info_cache with network_info: [{"id": "4cca7b08-a0eb-41d2-a798-a5a6f437fc5d", "address": "fa:16:3e:81:06:2c", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cca7b08-a0", "ovs_interfaceid": "4cca7b08-a0eb-41d2-a798-a5a6f437fc5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.446323] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Received event network-vif-plugged-b2ea6e7a-f21c-470f-b595-c30946c116d9 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 556.446621] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Acquiring lock "94b6a93d-de4d-4600-94af-81dce16b22f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.446903] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.447125] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.447333] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] No waiting events found dispatching network-vif-plugged-b2ea6e7a-f21c-470f-b595-c30946c116d9 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 556.447546] env[61440]: WARNING nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Received unexpected event network-vif-plugged-b2ea6e7a-f21c-470f-b595-c30946c116d9 for instance with vm_state building and task_state spawning. [ 556.447740] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Received event network-vif-plugged-6ec8f613-d7d4-4d1f-a41a-de0db4864db5 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 556.447938] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Acquiring lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.448171] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.448369] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.448567] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] No waiting events found dispatching network-vif-plugged-6ec8f613-d7d4-4d1f-a41a-de0db4864db5 {{(pid=61440) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 556.448921] env[61440]: WARNING nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Received unexpected event network-vif-plugged-6ec8f613-d7d4-4d1f-a41a-de0db4864db5 for instance with vm_state building and task_state spawning. [ 556.449150] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Received event network-changed-b2ea6e7a-f21c-470f-b595-c30946c116d9 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 556.449347] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Refreshing instance network info cache due to event network-changed-b2ea6e7a-f21c-470f-b595-c30946c116d9. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 556.449561] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Acquiring lock "refresh_cache-94b6a93d-de4d-4600-94af-81dce16b22f7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.449730] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Acquired lock "refresh_cache-94b6a93d-de4d-4600-94af-81dce16b22f7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.449929] env[61440]: DEBUG nova.network.neutron [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Refreshing network info cache for port b2ea6e7a-f21c-470f-b595-c30946c116d9 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 556.454644] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Releasing lock "refresh_cache-fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.454644] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance network_info: |[{"id": "4cca7b08-a0eb-41d2-a798-a5a6f437fc5d", "address": "fa:16:3e:81:06:2c", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cca7b08-a0", "ovs_interfaceid": "4cca7b08-a0eb-41d2-a798-a5a6f437fc5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 556.454868] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:06:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cca7b08-a0eb-41d2-a798-a5a6f437fc5d', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.461057] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Creating folder: Project (420abbf16e2f42ac8d14cfb1dc30481e). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.462595] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bf583e0-e77a-4478-b31d-79a306c70ba2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.477390] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Created folder: Project (420abbf16e2f42ac8d14cfb1dc30481e) in parent group-v843372. [ 556.477720] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Creating folder: Instances. Parent ref: group-v843391. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.477783] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52529d1e-8ebb-4d4b-996e-e8304b5afe0f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.487186] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Created folder: Instances in parent group-v843391. [ 556.487295] env[61440]: DEBUG oslo.service.loopingcall [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.487431] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 556.488041] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db1b1217-7980-4705-b68c-411ce242bef3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.512174] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.512174] env[61440]: value = "task-4281217" [ 556.512174] env[61440]: _type = "Task" [ 556.512174] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.520322] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281217, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.027332] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281217, 'name': CreateVM_Task, 'duration_secs': 0.326832} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.027441] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 557.028097] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.028266] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.028597] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.028856] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e80d86-df9d-40f6-ad5b-361735a81097 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.034556] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Waiting for the task: (returnval){ [ 557.034556] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5267a4ee-bc20-937c-c9fe-9c44c8583937" [ 557.034556] env[61440]: _type = "Task" [ 557.034556] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.045898] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5267a4ee-bc20-937c-c9fe-9c44c8583937, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.398611] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Successfully created port: 9b49474d-7d64-4841-bbe5-13f6b3c6a813 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.484743] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.485780] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.514372] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Starting instance... 
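
The lock named after the instance UUID ("f2e370a1-...") above is the compute manager's per-instance serialization: _locked_do_build_and_run_instance admits at most one build/run operation per instance at a time. The idiom, roughly as Nova spells it (Nova's own utils.synchronized is a thin wrapper over lockutils):

from oslo_concurrency import lockutils

def build_and_run_instance(instance_uuid):
    @lockutils.synchronized(instance_uuid)
    def _locked_do_build_and_run_instance():
        print('building %s' % instance_uuid)   # placeholder for the build
    _locked_do_build_and_run_instance()

build_and_run_instance('f2e370a1-6644-4794-8c9c-0ac9d7a4c156')
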
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 557.547929] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.548219] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.548432] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.584694] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.584983] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.587196] env[61440]: INFO nova.compute.claims [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.811994] env[61440]: DEBUG nova.network.neutron [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Updated VIF entry in instance network info cache for port b2ea6e7a-f21c-470f-b595-c30946c116d9. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 557.811994] env[61440]: DEBUG nova.network.neutron [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Updating instance_info_cache with network_info: [{"id": "b2ea6e7a-f21c-470f-b595-c30946c116d9", "address": "fa:16:3e:0f:53:a0", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ea6e7a-f2", "ovs_interfaceid": "b2ea6e7a-f21c-470f-b595-c30946c116d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.831873] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Releasing lock "refresh_cache-94b6a93d-de4d-4600-94af-81dce16b22f7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.835110] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Received event network-changed-6ec8f613-d7d4-4d1f-a41a-de0db4864db5 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 557.835110] env[61440]: DEBUG nova.compute.manager [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Refreshing instance network info cache due to event network-changed-6ec8f613-d7d4-4d1f-a41a-de0db4864db5. 
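
The earlier WARNINGs ("Received unexpected event network-vif-plugged-... for instance with vm_state building") and the "No waiting events found dispatching" lines describe the same rendezvous: Neutron's event arrived before the compute manager had registered a waiter for it, which is harmless this early in a spawn. The latch is keyed by an (event name, tag) pair; a threading sketch of that shape:

import threading

_waiters = {}
_mutex = threading.Lock()

def prepare_for_event(name, tag):
    ev = threading.Event()
    with _mutex:
        _waiters[(name, tag)] = ev
    return ev                       # caller later does ev.wait(timeout)

def dispatch_event(name, tag):
    with _mutex:
        ev = _waiters.pop((name, tag), None)
    if ev is None:                  # nobody waiting yet: the WARNING case
        print('unexpected event %s-%s' % (name, tag))
    else:
        ev.set()

dispatch_event('network-vif-plugged', 'b2ea6e7a')   # -> unexpected event ...
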
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 557.835606] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Acquiring lock "refresh_cache-f59b0b04-643c-497c-90a0-a7f885c1eb3b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.835606] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Acquired lock "refresh_cache-f59b0b04-643c-497c-90a0-a7f885c1eb3b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.835606] env[61440]: DEBUG nova.network.neutron [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Refreshing network info cache for port 6ec8f613-d7d4-4d1f-a41a-de0db4864db5 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 557.857019] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830ccddf-3055-4689-b5f1-686ed1956574 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.870146] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fa3bca-1799-4b97-b6a7-6c860e68c5a4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.911524] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7976b1b-5ac4-4bae-b23b-b7e777fbc7b7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.920342] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede1c5d5-3a0f-4e2f-94b1-a24358289826 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.935723] env[61440]: DEBUG nova.compute.provider_tree [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.950591] env[61440]: DEBUG nova.scheduler.client.report [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 557.973021] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.385s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.973021] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 558.025392] env[61440]: DEBUG nova.compute.utils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 558.027041] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 558.027948] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 558.047997] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 558.080904] env[61440]: DEBUG nova.compute.manager [req-80e2e7c2-d521-491c-8d3a-8dbbfdabf88e req-bb145897-efcc-41f6-8266-633f03feea44 service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Received event network-vif-plugged-4cca7b08-a0eb-41d2-a798-a5a6f437fc5d {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 558.082457] env[61440]: DEBUG oslo_concurrency.lockutils [req-80e2e7c2-d521-491c-8d3a-8dbbfdabf88e req-bb145897-efcc-41f6-8266-633f03feea44 service nova] Acquiring lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.082457] env[61440]: DEBUG oslo_concurrency.lockutils [req-80e2e7c2-d521-491c-8d3a-8dbbfdabf88e req-bb145897-efcc-41f6-8266-633f03feea44 service nova] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.082457] env[61440]: DEBUG oslo_concurrency.lockutils [req-80e2e7c2-d521-491c-8d3a-8dbbfdabf88e req-bb145897-efcc-41f6-8266-633f03feea44 service nova] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.082457] env[61440]: DEBUG nova.compute.manager [req-80e2e7c2-d521-491c-8d3a-8dbbfdabf88e req-bb145897-efcc-41f6-8266-633f03feea44 service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] No waiting events found dispatching network-vif-plugged-4cca7b08-a0eb-41d2-a798-a5a6f437fc5d {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 558.082811] env[61440]: WARNING nova.compute.manager [req-80e2e7c2-d521-491c-8d3a-8dbbfdabf88e req-bb145897-efcc-41f6-8266-633f03feea44 service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Received unexpected event network-vif-plugged-4cca7b08-a0eb-41d2-a798-a5a6f437fc5d for instance with vm_state building and task_state spawning. [ 558.142188] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 558.178336] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.178585] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.179297] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.179560] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.179716] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.179896] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.180095] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.181105] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.182252] env[61440]: DEBUG nova.virt.hardware [None 
req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.182252] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.182252] env[61440]: DEBUG nova.virt.hardware [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.184271] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c505079-eb62-4246-83e1-b01ee4e27d9f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.199033] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1a9ecb-65d9-4978-9c14-03ab46608994 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.310932] env[61440]: DEBUG nova.policy [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6c5e555721d40c6875c93fa6dad1434', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db06e739a2d34a9ebc3fbd6bd14c3ca5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 558.402596] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "2d853a6e-4c2f-401e-9088-54e82bec1150" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.403989] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.196258] env[61440]: DEBUG nova.network.neutron [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Updated VIF entry in instance network info cache for port 6ec8f613-d7d4-4d1f-a41a-de0db4864db5. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 559.196258] env[61440]: DEBUG nova.network.neutron [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Updating instance_info_cache with network_info: [{"id": "6ec8f613-d7d4-4d1f-a41a-de0db4864db5", "address": "fa:16:3e:1b:18:d4", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec8f613-d7", "ovs_interfaceid": "6ec8f613-d7d4-4d1f-a41a-de0db4864db5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.215640] env[61440]: DEBUG oslo_concurrency.lockutils [req-5cfe4a16-6446-4c17-b6cd-183c8889bab3 req-33939f6c-1917-481f-b7c1-10e817fdc32c service nova] Releasing lock "refresh_cache-f59b0b04-643c-497c-90a0-a7f885c1eb3b" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.604130] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Successfully updated port: df0b7982-0b15-498e-975e-61b6bb6134a4 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 559.619598] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "refresh_cache-f21a02ec-4fa2-439c-aa56-570e175a8b5e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.620173] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquired lock "refresh_cache-f21a02ec-4fa2-439c-aa56-570e175a8b5e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.620173] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 559.766177] env[61440]: DEBUG nova.network.neutron [None 
req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.339974] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Successfully created port: 2c00ce65-32da-4259-8a15-d2825bfee5eb {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.776923] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Updating instance_info_cache with network_info: [{"id": "df0b7982-0b15-498e-975e-61b6bb6134a4", "address": "fa:16:3e:a1:52:c4", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0b7982-0b", "ovs_interfaceid": "df0b7982-0b15-498e-975e-61b6bb6134a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.807641] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Releasing lock "refresh_cache-f21a02ec-4fa2-439c-aa56-570e175a8b5e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.807945] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance network_info: |[{"id": "df0b7982-0b15-498e-975e-61b6bb6134a4", "address": "fa:16:3e:a1:52:c4", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0b7982-0b", "ovs_interfaceid": "df0b7982-0b15-498e-975e-61b6bb6134a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 560.808456] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:52:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df0b7982-0b15-498e-975e-61b6bb6134a4', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 560.823330] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Creating folder: Project (4ecf629d533c4dfcacd4a3dab3729e9e). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.825125] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65aa0735-cffa-4f9b-ace1-067584ddb254 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.840124] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Created folder: Project (4ecf629d533c4dfcacd4a3dab3729e9e) in parent group-v843372. [ 560.840336] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Creating folder: Instances. Parent ref: group-v843394. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.841227] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42b1238b-3414-4b50-a2aa-29efe75c995b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.851232] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Created folder: Instances in parent group-v843394. [ 560.851278] env[61440]: DEBUG oslo.service.loopingcall [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.851480] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 560.851639] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff484a6e-14a6-493f-9eaa-95ce8a0f9950 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.874857] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 560.874857] env[61440]: value = "task-4281220" [ 560.874857] env[61440]: _type = "Task" [ 560.874857] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.882741] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281220, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.061960] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Successfully updated port: 9b49474d-7d64-4841-bbe5-13f6b3c6a813 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 561.079059] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "refresh_cache-a84b19ff-892b-43cb-9fd7-8f8b23f612a6" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.079328] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired lock "refresh_cache-a84b19ff-892b-43cb-9fd7-8f8b23f612a6" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.079619] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.176237] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.387594] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281220, 'name': CreateVM_Task, 'duration_secs': 0.356826} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.387895] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 561.388384] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.388553] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.390130] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 561.390130] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aab4a975-d4cb-4268-ba0e-cbaef4eece82 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.397430] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Waiting for the task: (returnval){ [ 561.397430] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5248daa5-dfc1-100f-7b7a-a8bb8bcc35f5" [ 561.397430] env[61440]: _type = "Task" [ 561.397430] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.408025] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5248daa5-dfc1-100f-7b7a-a8bb8bcc35f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.874463] env[61440]: DEBUG nova.compute.manager [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Received event network-changed-4cca7b08-a0eb-41d2-a798-a5a6f437fc5d {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 561.874780] env[61440]: DEBUG nova.compute.manager [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Refreshing instance network info cache due to event network-changed-4cca7b08-a0eb-41d2-a798-a5a6f437fc5d. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 561.875368] env[61440]: DEBUG oslo_concurrency.lockutils [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] Acquiring lock "refresh_cache-fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.875531] env[61440]: DEBUG oslo_concurrency.lockutils [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] Acquired lock "refresh_cache-fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.875709] env[61440]: DEBUG nova.network.neutron [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Refreshing network info cache for port 4cca7b08-a0eb-41d2-a798-a5a6f437fc5d {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 561.914111] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.914161] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 561.914688] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.164542] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Updating instance_info_cache with network_info: [{"id": "9b49474d-7d64-4841-bbe5-13f6b3c6a813", "address": "fa:16:3e:5b:69:03", "network": {"id": "fcf6a284-5561-4015-a08c-9ea5ed0cdcaa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1432724510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2701e3de21f42a7a286b54ec498da89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": 
"nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b49474d-7d", "ovs_interfaceid": "9b49474d-7d64-4841-bbe5-13f6b3c6a813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.204880] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Releasing lock "refresh_cache-a84b19ff-892b-43cb-9fd7-8f8b23f612a6" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.207040] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance network_info: |[{"id": "9b49474d-7d64-4841-bbe5-13f6b3c6a813", "address": "fa:16:3e:5b:69:03", "network": {"id": "fcf6a284-5561-4015-a08c-9ea5ed0cdcaa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1432724510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2701e3de21f42a7a286b54ec498da89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b49474d-7d", "ovs_interfaceid": "9b49474d-7d64-4841-bbe5-13f6b3c6a813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 562.208467] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:69:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721e64ee-fc02-4eb5-9c8c-ea55647a1b92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b49474d-7d64-4841-bbe5-13f6b3c6a813', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 562.222751] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating folder: Project (a2701e3de21f42a7a286b54ec498da89). Parent ref: group-v843372. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 562.223798] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53239885-da01-4190-adbf-e928892fdbab {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.240336] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Created folder: Project (a2701e3de21f42a7a286b54ec498da89) in parent group-v843372. [ 562.241139] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating folder: Instances. Parent ref: group-v843397. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 562.241476] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26d3b170-cd79-4b94-abe7-f39f45f447e5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.257509] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "3395aaef-0db6-4fab-b8a5-79b781129690" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.258199] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "3395aaef-0db6-4fab-b8a5-79b781129690" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.258790] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Created folder: Instances in parent group-v843397. [ 562.259568] env[61440]: DEBUG oslo.service.loopingcall [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.259568] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 562.259820] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4789065d-a32c-46d3-a712-8122aeafee56 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.282805] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 562.282805] env[61440]: value = "task-4281223" [ 562.282805] env[61440]: _type = "Task" [ 562.282805] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.294639] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281223, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.797662] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281223, 'name': CreateVM_Task} progress is 99%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.155583] env[61440]: DEBUG nova.compute.manager [req-b6519742-5345-4fbc-b4f5-85caf36fc388 req-dfcec127-1e6d-4258-bc10-dde62998a807 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Received event network-vif-plugged-df0b7982-0b15-498e-975e-61b6bb6134a4 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 563.156277] env[61440]: DEBUG oslo_concurrency.lockutils [req-b6519742-5345-4fbc-b4f5-85caf36fc388 req-dfcec127-1e6d-4258-bc10-dde62998a807 service nova] Acquiring lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.156471] env[61440]: DEBUG oslo_concurrency.lockutils [req-b6519742-5345-4fbc-b4f5-85caf36fc388 req-dfcec127-1e6d-4258-bc10-dde62998a807 service nova] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.156762] env[61440]: DEBUG oslo_concurrency.lockutils [req-b6519742-5345-4fbc-b4f5-85caf36fc388 req-dfcec127-1e6d-4258-bc10-dde62998a807 service nova] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.157109] env[61440]: DEBUG nova.compute.manager [req-b6519742-5345-4fbc-b4f5-85caf36fc388 req-dfcec127-1e6d-4258-bc10-dde62998a807 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] No waiting events found dispatching network-vif-plugged-df0b7982-0b15-498e-975e-61b6bb6134a4 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 563.157305] env[61440]: WARNING nova.compute.manager [req-b6519742-5345-4fbc-b4f5-85caf36fc388 req-dfcec127-1e6d-4258-bc10-dde62998a807 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Received unexpected event network-vif-plugged-df0b7982-0b15-498e-975e-61b6bb6134a4 for instance with vm_state building and task_state spawning. [ 563.300713] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281223, 'name': CreateVM_Task, 'duration_secs': 0.564722} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.300931] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 563.302590] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.302810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.303148] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 563.303413] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daf50cbc-81dd-4e8a-b741-48cc05138311 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.309286] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 563.309286] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52249666-c4b8-1dc0-cf39-8cc11d8c9f87" [ 563.309286] env[61440]: _type = "Task" [ 563.309286] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.318448] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52249666-c4b8-1dc0-cf39-8cc11d8c9f87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.325270] env[61440]: DEBUG nova.network.neutron [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Updated VIF entry in instance network info cache for port 4cca7b08-a0eb-41d2-a798-a5a6f437fc5d. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.325906] env[61440]: DEBUG nova.network.neutron [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Updating instance_info_cache with network_info: [{"id": "4cca7b08-a0eb-41d2-a798-a5a6f437fc5d", "address": "fa:16:3e:81:06:2c", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cca7b08-a0", "ovs_interfaceid": "4cca7b08-a0eb-41d2-a798-a5a6f437fc5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.346356] env[61440]: DEBUG oslo_concurrency.lockutils [req-22cf00ed-6f2f-4572-9306-6bf66aa29160 req-29b48125-cc22-49a8-8547-10bd179178fd service nova] Releasing lock "refresh_cache-fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.813146] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "1438771e-fd84-4dac-81b1-c2df19972ebe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.813469] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.828885] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.829508] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Processing image 
5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 563.829900] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.101590] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Successfully updated port: 2c00ce65-32da-4259-8a15-d2825bfee5eb {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 564.120322] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "refresh_cache-f2e370a1-6644-4794-8c9c-0ac9d7a4c156" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.120991] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired lock "refresh_cache-f2e370a1-6644-4794-8c9c-0ac9d7a4c156" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.121267] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 564.220237] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.600262] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Updating instance_info_cache with network_info: [{"id": "2c00ce65-32da-4259-8a15-d2825bfee5eb", "address": "fa:16:3e:af:3f:7c", "network": {"id": "73748327-c796-4a9e-a583-c7e8ce60c1cf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-164031856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db06e739a2d34a9ebc3fbd6bd14c3ca5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c00ce65-32", "ovs_interfaceid": "2c00ce65-32da-4259-8a15-d2825bfee5eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.619797] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Releasing lock "refresh_cache-f2e370a1-6644-4794-8c9c-0ac9d7a4c156" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.620115] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance network_info: |[{"id": "2c00ce65-32da-4259-8a15-d2825bfee5eb", "address": "fa:16:3e:af:3f:7c", "network": {"id": "73748327-c796-4a9e-a583-c7e8ce60c1cf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-164031856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db06e739a2d34a9ebc3fbd6bd14c3ca5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c00ce65-32", "ovs_interfaceid": "2c00ce65-32da-4259-8a15-d2825bfee5eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 564.620538] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:3f:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c00ce65-32da-4259-8a15-d2825bfee5eb', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 564.628764] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating folder: Project (db06e739a2d34a9ebc3fbd6bd14c3ca5). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 564.630721] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e46b1563-93e1-4bdc-b5e0-940aeaa29fd5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.642844] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Created folder: Project (db06e739a2d34a9ebc3fbd6bd14c3ca5) in parent group-v843372. [ 564.642969] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating folder: Instances. Parent ref: group-v843400. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 564.643232] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4d0755e-f229-4019-ac43-156f7331a41f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.656440] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Created folder: Instances in parent group-v843400. [ 564.656440] env[61440]: DEBUG oslo.service.loopingcall [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.656440] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 564.656440] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2506fb61-79bd-4be8-a195-c5b62d92e84c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.683287] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 564.683287] env[61440]: value = "task-4281226" [ 564.683287] env[61440]: _type = "Task" [ 564.683287] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.692472] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281226, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.202339] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281226, 'name': CreateVM_Task, 'duration_secs': 0.379868} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.202629] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 565.203275] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.204251] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.204664] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 565.205013] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-102364b2-b02a-4dc3-97d1-e274a73daffd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.210350] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 565.210350] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52040de5-e8d9-98e1-9ca9-1c5f4546bb8f" [ 565.210350] env[61440]: _type = "Task" [ 565.210350] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.219414] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52040de5-e8d9-98e1-9ca9-1c5f4546bb8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.725093] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.725093] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 565.725093] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.300045] env[61440]: DEBUG nova.compute.manager [req-8ab931ba-f732-4ff9-ae6f-9141f7953997 req-0348bf79-a36a-40d1-b20e-5a85b673fafb service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Received event network-vif-plugged-2c00ce65-32da-4259-8a15-d2825bfee5eb {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 566.300502] env[61440]: DEBUG oslo_concurrency.lockutils [req-8ab931ba-f732-4ff9-ae6f-9141f7953997 req-0348bf79-a36a-40d1-b20e-5a85b673fafb service nova] Acquiring lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.300744] env[61440]: DEBUG oslo_concurrency.lockutils [req-8ab931ba-f732-4ff9-ae6f-9141f7953997 req-0348bf79-a36a-40d1-b20e-5a85b673fafb service nova] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.301551] env[61440]: DEBUG oslo_concurrency.lockutils [req-8ab931ba-f732-4ff9-ae6f-9141f7953997 req-0348bf79-a36a-40d1-b20e-5a85b673fafb service nova] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.301551] env[61440]: DEBUG nova.compute.manager [req-8ab931ba-f732-4ff9-ae6f-9141f7953997 req-0348bf79-a36a-40d1-b20e-5a85b673fafb service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] No waiting events found dispatching network-vif-plugged-2c00ce65-32da-4259-8a15-d2825bfee5eb {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 566.301551] env[61440]: WARNING nova.compute.manager [req-8ab931ba-f732-4ff9-ae6f-9141f7953997 req-0348bf79-a36a-40d1-b20e-5a85b673fafb service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Received unexpected event 
network-vif-plugged-2c00ce65-32da-4259-8a15-d2825bfee5eb for instance with vm_state building and task_state spawning. [ 566.492036] env[61440]: DEBUG nova.compute.manager [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Received event network-changed-df0b7982-0b15-498e-975e-61b6bb6134a4 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 566.492352] env[61440]: DEBUG nova.compute.manager [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Refreshing instance network info cache due to event network-changed-df0b7982-0b15-498e-975e-61b6bb6134a4. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 566.492447] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Acquiring lock "refresh_cache-f21a02ec-4fa2-439c-aa56-570e175a8b5e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.492684] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Acquired lock "refresh_cache-f21a02ec-4fa2-439c-aa56-570e175a8b5e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.492759] env[61440]: DEBUG nova.network.neutron [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Refreshing network info cache for port df0b7982-0b15-498e-975e-61b6bb6134a4 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 567.544091] env[61440]: DEBUG nova.network.neutron [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Updated VIF entry in instance network info cache for port df0b7982-0b15-498e-975e-61b6bb6134a4. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 567.545152] env[61440]: DEBUG nova.network.neutron [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Updating instance_info_cache with network_info: [{"id": "df0b7982-0b15-498e-975e-61b6bb6134a4", "address": "fa:16:3e:a1:52:c4", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0b7982-0b", "ovs_interfaceid": "df0b7982-0b15-498e-975e-61b6bb6134a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.563064] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Releasing lock "refresh_cache-f21a02ec-4fa2-439c-aa56-570e175a8b5e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.563064] env[61440]: DEBUG nova.compute.manager [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Received event network-vif-plugged-9b49474d-7d64-4841-bbe5-13f6b3c6a813 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 567.563064] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Acquiring lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.563064] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.563352] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.563352] env[61440]: DEBUG nova.compute.manager 
[req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] No waiting events found dispatching network-vif-plugged-9b49474d-7d64-4841-bbe5-13f6b3c6a813 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 567.563352] env[61440]: WARNING nova.compute.manager [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Received unexpected event network-vif-plugged-9b49474d-7d64-4841-bbe5-13f6b3c6a813 for instance with vm_state building and task_state spawning. [ 567.563352] env[61440]: DEBUG nova.compute.manager [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Received event network-changed-9b49474d-7d64-4841-bbe5-13f6b3c6a813 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 567.563479] env[61440]: DEBUG nova.compute.manager [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Refreshing instance network info cache due to event network-changed-9b49474d-7d64-4841-bbe5-13f6b3c6a813. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 567.563479] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Acquiring lock "refresh_cache-a84b19ff-892b-43cb-9fd7-8f8b23f612a6" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.563479] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Acquired lock "refresh_cache-a84b19ff-892b-43cb-9fd7-8f8b23f612a6" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.563479] env[61440]: DEBUG nova.network.neutron [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Refreshing network info cache for port 9b49474d-7d64-4841-bbe5-13f6b3c6a813 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 568.062742] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "f152a563-2988-4fac-9974-af25e17f14d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.062742] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "f152a563-2988-4fac-9974-af25e17f14d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.281902] env[61440]: DEBUG nova.network.neutron [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] 
[instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Updated VIF entry in instance network info cache for port 9b49474d-7d64-4841-bbe5-13f6b3c6a813. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 568.282278] env[61440]: DEBUG nova.network.neutron [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Updating instance_info_cache with network_info: [{"id": "9b49474d-7d64-4841-bbe5-13f6b3c6a813", "address": "fa:16:3e:5b:69:03", "network": {"id": "fcf6a284-5561-4015-a08c-9ea5ed0cdcaa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1432724510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2701e3de21f42a7a286b54ec498da89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b49474d-7d", "ovs_interfaceid": "9b49474d-7d64-4841-bbe5-13f6b3c6a813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.297675] env[61440]: DEBUG oslo_concurrency.lockutils [req-bc47704f-96d3-4b52-bb99-9420a6d05def req-a3f444d2-a5ba-4a90-9068-d9defdf9dc55 service nova] Releasing lock "refresh_cache-a84b19ff-892b-43cb-9fd7-8f8b23f612a6" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.334140] env[61440]: DEBUG nova.compute.manager [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Received event network-changed-2c00ce65-32da-4259-8a15-d2825bfee5eb {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 570.334140] env[61440]: DEBUG nova.compute.manager [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Refreshing instance network info cache due to event network-changed-2c00ce65-32da-4259-8a15-d2825bfee5eb. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 570.334140] env[61440]: DEBUG oslo_concurrency.lockutils [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] Acquiring lock "refresh_cache-f2e370a1-6644-4794-8c9c-0ac9d7a4c156" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.334140] env[61440]: DEBUG oslo_concurrency.lockutils [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] Acquired lock "refresh_cache-f2e370a1-6644-4794-8c9c-0ac9d7a4c156" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.334442] env[61440]: DEBUG nova.network.neutron [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Refreshing network info cache for port 2c00ce65-32da-4259-8a15-d2825bfee5eb {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 570.852057] env[61440]: DEBUG nova.network.neutron [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Updated VIF entry in instance network info cache for port 2c00ce65-32da-4259-8a15-d2825bfee5eb. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 570.852057] env[61440]: DEBUG nova.network.neutron [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Updating instance_info_cache with network_info: [{"id": "2c00ce65-32da-4259-8a15-d2825bfee5eb", "address": "fa:16:3e:af:3f:7c", "network": {"id": "73748327-c796-4a9e-a583-c7e8ce60c1cf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-164031856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db06e739a2d34a9ebc3fbd6bd14c3ca5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c00ce65-32", "ovs_interfaceid": "2c00ce65-32da-4259-8a15-d2825bfee5eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.863998] env[61440]: DEBUG oslo_concurrency.lockutils [req-0cc551f9-c870-4a77-8114-e7a8792e8a2a req-d0cf525e-972e-44c2-a4a9-3716e2b2a5cf service nova] Releasing lock "refresh_cache-f2e370a1-6644-4794-8c9c-0ac9d7a4c156" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.328635] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8aa1a5e9-7e17-4d6e-b779-d1bc16585272 tempest-VolumesAssistedSnapshotsTest-2106796437 tempest-VolumesAssistedSnapshotsTest-2106796437-project-member] 
Acquiring lock "766a9405-8a7d-4876-8569-964d2e73fedb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.328949] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8aa1a5e9-7e17-4d6e-b779-d1bc16585272 tempest-VolumesAssistedSnapshotsTest-2106796437 tempest-VolumesAssistedSnapshotsTest-2106796437-project-member] Lock "766a9405-8a7d-4876-8569-964d2e73fedb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.784737] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a6b92dae-08b8-49f8-95c8-2f6d764b7846 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] Acquiring lock "9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.785574] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a6b92dae-08b8-49f8-95c8-2f6d764b7846 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] Lock "9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.927225] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e2c138d8-73be-4ead-ac57-7c88da84f030 tempest-ServersWithSpecificFlavorTestJSON-1644472028 tempest-ServersWithSpecificFlavorTestJSON-1644472028-project-member] Acquiring lock "2d57a929-eb61-471d-b0a1-d1e366201ccc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.927420] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e2c138d8-73be-4ead-ac57-7c88da84f030 tempest-ServersWithSpecificFlavorTestJSON-1644472028 tempest-ServersWithSpecificFlavorTestJSON-1644472028-project-member] Lock "2d57a929-eb61-471d-b0a1-d1e366201ccc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.825078] env[61440]: DEBUG oslo_concurrency.lockutils [None req-63ff02c4-c79d-48ac-afb4-72c6f969fe3c tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] Acquiring lock "56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.825391] env[61440]: DEBUG oslo_concurrency.lockutils [None req-63ff02c4-c79d-48ac-afb4-72c6f969fe3c tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] Lock "56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.560944] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f39d503d-726c-4392-a5cb-9bda41f75cd2 tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] Acquiring lock "0a80de0b-a914-443b-be18-a13b0eda231e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.560944] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f39d503d-726c-4392-a5cb-9bda41f75cd2 tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] Lock "0a80de0b-a914-443b-be18-a13b0eda231e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.762881] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e30b274c-6cf2-4794-8dc7-0fd5d63ece1c tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] Acquiring lock "e6a7a2d1-a50d-478c-9c27-fe58504fa14b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.768223] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e30b274c-6cf2-4794-8dc7-0fd5d63ece1c tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] Lock "e6a7a2d1-a50d-478c-9c27-fe58504fa14b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.926088] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7feb6e47-0c97-4720-a2dc-2222b433c04f tempest-ServersAaction247Test-122133342 tempest-ServersAaction247Test-122133342-project-member] Acquiring lock "53a53e12-741a-4104-91d1-8d41f2b490ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.926088] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7feb6e47-0c97-4720-a2dc-2222b433c04f tempest-ServersAaction247Test-122133342 tempest-ServersAaction247Test-122133342-project-member] Lock "53a53e12-741a-4104-91d1-8d41f2b490ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.154666] env[61440]: DEBUG oslo_concurrency.lockutils [None req-66ead4ad-257f-4867-ae78-e7f6589e9221 tempest-FloatingIPsAssociationTestJSON-1509650545 tempest-FloatingIPsAssociationTestJSON-1509650545-project-member] Acquiring lock "20a0ddf2-83bc-4e56-8208-12bb200c26e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.154666] env[61440]: DEBUG oslo_concurrency.lockutils [None req-66ead4ad-257f-4867-ae78-e7f6589e9221 
tempest-FloatingIPsAssociationTestJSON-1509650545 tempest-FloatingIPsAssociationTestJSON-1509650545-project-member] Lock "20a0ddf2-83bc-4e56-8208-12bb200c26e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.080836] env[61440]: WARNING oslo_vmware.rw_handles [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 593.080836] env[61440]: ERROR oslo_vmware.rw_handles [ 593.081243] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 593.082836] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 593.083297] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Copying Virtual Disk [datastore2] vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/df4f8261-2c10-4f31-b39f-a9f0621d15de/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 593.083585] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f4af37c-5e80-40f5-9a44-b99fe798b35d {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.095727] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Waiting for the task: (returnval){ [ 593.095727] env[61440]: value = "task-4281238" [ 593.095727] env[61440]: _type = "Task" [ 593.095727] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.101312] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Task: {'id': task-4281238, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.606080] env[61440]: DEBUG oslo_vmware.exceptions [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 593.606466] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.610412] env[61440]: ERROR nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 593.610412] env[61440]: Faults: ['InvalidArgument'] [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Traceback (most recent call last): [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] yield resources [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self.driver.spawn(context, instance, image_meta, [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 593.610412] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self._fetch_image_if_missing(context, vi) [ 593.610412] 
env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] image_cache(vi, tmp_image_ds_loc) [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] vm_util.copy_virtual_disk( [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] session._wait_for_task(vmdk_copy_task) [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] return self.wait_for_task(task_ref) [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] return evt.wait() [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] result = hub.switch() [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 593.611126] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] return self.greenlet.switch() [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self.f(*self.args, **self.kw) [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] raise exceptions.translate_fault(task_info.error) [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Faults: ['InvalidArgument'] [ 593.611722] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] [ 593.612053] env[61440]: INFO nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 
tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Terminating instance [ 593.613689] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.613815] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 593.614311] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "refresh_cache-ce0340f3-116c-4196-a5e1-ae1225f6c4b3" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.614470] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquired lock "refresh_cache-ce0340f3-116c-4196-a5e1-ae1225f6c4b3" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.614818] env[61440]: DEBUG nova.network.neutron [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.615746] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e637db87-dd8a-4888-9d52-03809da69c8c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.624327] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 593.624804] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 593.625991] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d63d102-e896-4acf-b5e8-c7add59a5128 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.635059] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Waiting for the task: (returnval){ [ 593.635059] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e01921-ac16-d64b-320c-3d78e8d4b031" [ 593.635059] env[61440]: _type = "Task" [ 593.635059] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.645055] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e01921-ac16-d64b-320c-3d78e8d4b031, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.659397] env[61440]: DEBUG nova.network.neutron [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.704289] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d1cdad2b-b45c-41ed-9207-a40e5b8ddb4a tempest-ServersTestBootFromVolume-1926183255 tempest-ServersTestBootFromVolume-1926183255-project-member] Acquiring lock "e4395e44-91f2-4f9b-a902-12859618f9cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.704528] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d1cdad2b-b45c-41ed-9207-a40e5b8ddb4a tempest-ServersTestBootFromVolume-1926183255 tempest-ServersTestBootFromVolume-1926183255-project-member] Lock "e4395e44-91f2-4f9b-a902-12859618f9cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.872277] env[61440]: DEBUG nova.network.neutron [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.885951] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Releasing lock "refresh_cache-ce0340f3-116c-4196-a5e1-ae1225f6c4b3" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.886418] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 
tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 593.886596] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 593.887786] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4d6751-6e65-45c5-84ba-4aeec5b3ebd9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.898592] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 593.898592] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ceef83fd-01fd-4b5a-8261-970fe06ca7b1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.931167] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 593.931462] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 593.931922] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Deleting the datastore file [datastore2] ce0340f3-116c-4196-a5e1-ae1225f6c4b3 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 593.932203] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f36718e-775c-419f-92fe-b1930551b65a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.939526] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Waiting for the task: (returnval){ [ 593.939526] env[61440]: value = "task-4281240" [ 593.939526] env[61440]: _type = "Task" [ 593.939526] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.948306] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Task: {'id': task-4281240, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.145453] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 594.145851] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Creating directory with path [datastore2] vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 594.146273] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa3b360b-e4af-4de2-a1d3-20cbbac488d2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.158065] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Created directory with path [datastore2] vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 594.158281] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Fetch image to [datastore2] vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 594.158457] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 594.159323] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82932480-85cd-4c85-8442-fc4c7d73c9a2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.172480] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e007e6c9-e7d1-49d4-a3c9-b7894c3d24e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.183963] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3337f5-dc65-4ebb-a4cf-310a73875173 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.223512] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d9e2b4-f7ed-49ca-a26c-d983b6b4f04d {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.229727] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6cf4a9cc-7450-4a29-affe-003d89caaf27 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.322547] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 594.407459] env[61440]: DEBUG oslo_vmware.rw_handles [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 594.487847] env[61440]: DEBUG oslo_vmware.rw_handles [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 594.487847] env[61440]: DEBUG oslo_vmware.rw_handles [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 594.491238] env[61440]: DEBUG oslo_vmware.api [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Task: {'id': task-4281240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04499} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.491499] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 594.491682] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 594.493064] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 594.493064] env[61440]: INFO nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Took 0.61 seconds to destroy the instance on the hypervisor. [ 594.493064] env[61440]: DEBUG oslo.service.loopingcall [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.493064] env[61440]: DEBUG nova.compute.manager [-] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 594.496442] env[61440]: DEBUG nova.compute.claims [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 594.496442] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.496442] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.949602] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28499165-3c32-4aa5-80d5-21585595ec4d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.957938] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e213b77e-a54b-472d-bcba-47d7a0b00aa7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.989398] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d89e41-4f0b-44d6-bc76-6cf8f051cd5c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.996781] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c67d5e-a005-487d-85e8-7acde5976ba3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.011023] env[61440]: DEBUG nova.compute.provider_tree [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.022519] env[61440]: DEBUG nova.scheduler.client.report [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 595.050798] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.555s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.051026] env[61440]: ERROR nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 595.051026] env[61440]: Faults: ['InvalidArgument'] [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Traceback (most recent call last): [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self.driver.spawn(context, instance, image_meta, [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self._fetch_image_if_missing(context, vi) [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] image_cache(vi, tmp_image_ds_loc) [ 595.051026] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] vm_util.copy_virtual_disk( [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] session._wait_for_task(vmdk_copy_task) [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] return self.wait_for_task(task_ref) [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] return evt.wait() [ 
595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] result = hub.switch() [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] return self.greenlet.switch() [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 595.051387] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] self.f(*self.args, **self.kw) [ 595.051742] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 595.051742] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] raise exceptions.translate_fault(task_info.error) [ 595.051742] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 595.051742] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Faults: ['InvalidArgument'] [ 595.051742] env[61440]: ERROR nova.compute.manager [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] [ 595.051911] env[61440]: DEBUG nova.compute.utils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 595.059583] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Build of instance ce0340f3-116c-4196-a5e1-ae1225f6c4b3 was re-scheduled: A specified parameter was not correct: fileType [ 595.059583] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 595.060043] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 595.060286] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Acquiring lock "refresh_cache-ce0340f3-116c-4196-a5e1-ae1225f6c4b3" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.060430] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 
tempest-ServersAdmin275Test-1388513127-project-member] Acquired lock "refresh_cache-ce0340f3-116c-4196-a5e1-ae1225f6c4b3" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.060586] env[61440]: DEBUG nova.network.neutron [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 595.108513] env[61440]: DEBUG nova.network.neutron [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.281140] env[61440]: DEBUG nova.network.neutron [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.303823] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Releasing lock "refresh_cache-ce0340f3-116c-4196-a5e1-ae1225f6c4b3" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.304120] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 595.304330] env[61440]: DEBUG nova.compute.manager [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 595.444858] env[61440]: INFO nova.scheduler.client.report [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Deleted allocations for instance ce0340f3-116c-4196-a5e1-ae1225f6c4b3 [ 595.471688] env[61440]: DEBUG oslo_concurrency.lockutils [None req-04e3849b-1c87-4059-9167-cd7f1c211bec tempest-ServersAdmin275Test-1388513127 tempest-ServersAdmin275Test-1388513127-project-member] Lock "ce0340f3-116c-4196-a5e1-ae1225f6c4b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.359s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.472877] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "ce0340f3-116c-4196-a5e1-ae1225f6c4b3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 49.857s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.476021] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ce0340f3-116c-4196-a5e1-ae1225f6c4b3] During sync_power_state the instance has a pending task (spawning). Skip. [ 595.476021] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "ce0340f3-116c-4196-a5e1-ae1225f6c4b3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.508021] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 595.572529] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.573634] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.574411] env[61440]: INFO nova.compute.claims [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.714963] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2b3988-ee75-48f1-933a-81b19bb484e6 tempest-ImagesNegativeTestJSON-1075041407 tempest-ImagesNegativeTestJSON-1075041407-project-member] Acquiring lock "276e62bf-fd35-47ed-b422-b45fb4a89ed2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.715210] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2b3988-ee75-48f1-933a-81b19bb484e6 tempest-ImagesNegativeTestJSON-1075041407 tempest-ImagesNegativeTestJSON-1075041407-project-member] Lock "276e62bf-fd35-47ed-b422-b45fb4a89ed2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.039568] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef025c2-41ac-4b6a-a332-de222fa1d621 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.047831] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4a87a8-006a-46c3-83d1-537c140f6b3f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.081534] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b244139-be93-41ff-9099-09e67189b04b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.089272] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83a6f7f-1fd9-482d-ae8f-2b474300b3d1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.102635] env[61440]: DEBUG nova.compute.provider_tree [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Inventory has not changed 
in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.115354] env[61440]: DEBUG nova.scheduler.client.report [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 596.136565] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.564s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.137283] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 596.190226] env[61440]: DEBUG nova.compute.utils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.192678] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 596.192678] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 596.213150] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 596.306618] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 596.351119] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 596.352033] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 596.352033] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.352033] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 596.352221] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.352221] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 596.352429] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 596.352615] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 596.352782] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 596.353587] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 596.353784] env[61440]: DEBUG nova.virt.hardware [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 596.354846] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239f426a-fda5-4ba5-a840-4d624d00a091 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.365301] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9c8941-ac6a-4bd8-ba1a-46eb40e64297 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.643138] env[61440]: DEBUG nova.policy [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79c2f56598a3400caeec25c54a92a083', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20a7ed402f3e45a9b2b01066b6510f75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 597.949792] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Successfully created port: 1c7ee987-0ff0-45bd-9488-d9e726e1a203 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.914285] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.914678] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.046739] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Successfully updated port: 1c7ee987-0ff0-45bd-9488-d9e726e1a203 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.062546] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "refresh_cache-2d853a6e-4c2f-401e-9088-54e82bec1150" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.063282] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquired lock "refresh_cache-2d853a6e-4c2f-401e-9088-54e82bec1150" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.063282] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.366771] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.794534] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Updating instance_info_cache with network_info: [{"id": "1c7ee987-0ff0-45bd-9488-d9e726e1a203", "address": "fa:16:3e:7a:56:54", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c7ee987-0f", "ovs_interfaceid": "1c7ee987-0ff0-45bd-9488-d9e726e1a203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.812644] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Releasing lock "refresh_cache-2d853a6e-4c2f-401e-9088-54e82bec1150" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.812644] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance network_info: |[{"id": "1c7ee987-0ff0-45bd-9488-d9e726e1a203", "address": "fa:16:3e:7a:56:54", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c7ee987-0f", "ovs_interfaceid": "1c7ee987-0ff0-45bd-9488-d9e726e1a203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 600.812791] env[61440]: 
DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:56:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c7ee987-0ff0-45bd-9488-d9e726e1a203', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 600.827429] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Creating folder: Project (20a7ed402f3e45a9b2b01066b6510f75). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 600.828409] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3e364d1-ab7f-4ae8-89aa-8c2f9295f844 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.843397] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Created folder: Project (20a7ed402f3e45a9b2b01066b6510f75) in parent group-v843372. [ 600.843397] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Creating folder: Instances. Parent ref: group-v843407. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 600.843397] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d87568a-b73e-48f9-996b-0d197842ac42 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.855408] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Created folder: Instances in parent group-v843407. [ 600.855408] env[61440]: DEBUG oslo.service.loopingcall [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.855408] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 600.855408] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c90e03e-631c-4a5c-92b1-77c559afe85e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.877415] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 600.877415] env[61440]: value = "task-4281243" [ 600.877415] env[61440]: _type = "Task" [ 600.877415] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.885666] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281243, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.390261] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281243, 'name': CreateVM_Task, 'duration_secs': 0.345166} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.392512] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 601.392512] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.392512] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.392512] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 601.393693] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60c83ca2-c942-432c-b18a-4f38e8f9b96d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.403577] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Waiting for the task: (returnval){ [ 601.403577] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526c01d9-c69d-370b-ffc0-c3598b700a61" [ 601.403577] env[61440]: _type = "Task" [ 601.403577] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.413359] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526c01d9-c69d-370b-ffc0-c3598b700a61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.481516] env[61440]: DEBUG nova.compute.manager [req-ca6eefb4-3f41-4772-865c-1c5ea5e39916 req-2c53543a-a2b8-49b6-a5e8-48285bc51298 service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Received event network-vif-plugged-1c7ee987-0ff0-45bd-9488-d9e726e1a203 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 601.481742] env[61440]: DEBUG oslo_concurrency.lockutils [req-ca6eefb4-3f41-4772-865c-1c5ea5e39916 req-2c53543a-a2b8-49b6-a5e8-48285bc51298 service nova] Acquiring lock "2d853a6e-4c2f-401e-9088-54e82bec1150-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.481951] env[61440]: DEBUG oslo_concurrency.lockutils [req-ca6eefb4-3f41-4772-865c-1c5ea5e39916 req-2c53543a-a2b8-49b6-a5e8-48285bc51298 service nova] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.482136] env[61440]: DEBUG oslo_concurrency.lockutils [req-ca6eefb4-3f41-4772-865c-1c5ea5e39916 req-2c53543a-a2b8-49b6-a5e8-48285bc51298 service nova] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.482309] env[61440]: DEBUG nova.compute.manager [req-ca6eefb4-3f41-4772-865c-1c5ea5e39916 req-2c53543a-a2b8-49b6-a5e8-48285bc51298 service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] No waiting events found dispatching network-vif-plugged-1c7ee987-0ff0-45bd-9488-d9e726e1a203 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 601.482474] env[61440]: WARNING nova.compute.manager [req-ca6eefb4-3f41-4772-865c-1c5ea5e39916 req-2c53543a-a2b8-49b6-a5e8-48285bc51298 service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Received unexpected event network-vif-plugged-1c7ee987-0ff0-45bd-9488-d9e726e1a203 for instance with vm_state building and task_state spawning. 
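The records at 601.481516 through 601.482474 above show Nova's external-event handshake for port 1c7ee987: the build path normally registers a waiter for network-vif-plugged-<port> before plugging the VIF, and the Neutron-triggered callback pops and completes that waiter; here no waiter had been registered yet, so the event is logged as unexpected. A minimal illustrative sketch of that registry pattern follows (hypothetical simplification; Nova's real InstanceEvents class, named in the log, is more involved):

import threading

class InstanceEvents:
    """Toy event registry keyed by (instance_uuid, event_name)."""

    def __init__(self):
        self._events = {}              # {instance_uuid: {event_name: Event}}
        self._lock = threading.Lock()  # cf. the "<uuid>-events" lock above

    def prepare_for_instance_event(self, instance_uuid, event_name):
        # Called by the build path *before* plugging the VIF; the builder
        # later blocks in ev.wait(timeout) until the event arrives.
        ev = threading.Event()
        with self._lock:
            self._events.setdefault(instance_uuid, {})[event_name] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Remove and return the waiter, or None if nobody registered one.
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_name, None)

def external_instance_event(events, instance_uuid, event_name):
    # Entry point for the Neutron-driven callback.
    ev = events.pop_instance_event(instance_uuid, event_name)
    if ev is None:
        # Matches the WARNING above: no waiting events found for the event.
        print("Received unexpected event %s for %s" % (event_name, instance_uuid))
    else:
        ev.set()  # wakes the spawning thread blocked on the waiter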
[ 601.918545] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.918653] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 601.920094] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.672763] env[61440]: DEBUG oslo_concurrency.lockutils [None req-51652501-6bb5-4da5-957a-7388dbb48d10 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "3ca84343-76bb-46f4-89d8-3cc45ac3dc0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.673802] env[61440]: DEBUG oslo_concurrency.lockutils [None req-51652501-6bb5-4da5-957a-7388dbb48d10 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "3ca84343-76bb-46f4-89d8-3cc45ac3dc0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.082555] env[61440]: DEBUG oslo_concurrency.lockutils [None req-10e5627c-e436-4981-8b36-ede63ef21c48 tempest-ServersV294TestFqdnHostnames-1865455608 tempest-ServersV294TestFqdnHostnames-1865455608-project-member] Acquiring lock "c737971c-735e-4317-b0e6-eb73bfc8456a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.082883] env[61440]: DEBUG oslo_concurrency.lockutils [None req-10e5627c-e436-4981-8b36-ede63ef21c48 tempest-ServersV294TestFqdnHostnames-1865455608 tempest-ServersV294TestFqdnHostnames-1865455608-project-member] Lock "c737971c-735e-4317-b0e6-eb73bfc8456a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.509709] env[61440]: DEBUG nova.compute.manager [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Received event network-changed-1c7ee987-0ff0-45bd-9488-d9e726e1a203 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} 
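The Acquiring / acquired / "released" lines throughout this section are emitted by oslo.concurrency's lockutils (lockutils.py:402/407/421), which logs how long each caller waited for and held a named semaphore. The qualname build_and_run_instance.<locals>._locked_do_build_and_run_instance shows the pattern in use: the critical section is a nested function synchronized on the instance UUID, so retries for one instance serialize while different instances build concurrently. A minimal sketch of that pattern, using the real lockutils.synchronized decorator but a hypothetical build body:

from oslo_concurrency import lockutils

def build_and_run_instance(instance_uuid):
    # Lock name = instance UUID: builds of *different* instances proceed in
    # parallel, while a second attempt on the *same* instance blocks here,
    # producing the "waited N.NNNs" / "held N.NNNs" figures seen in the log.
    @lockutils.synchronized(instance_uuid)
    def _locked_do_build_and_run_instance():
        pass  # claim resources, allocate networks, spawn on the hypervisor

    return _locked_do_build_and_run_instance()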
[ 605.510077] env[61440]: DEBUG nova.compute.manager [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Refreshing instance network info cache due to event network-changed-1c7ee987-0ff0-45bd-9488-d9e726e1a203. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 605.511206] env[61440]: DEBUG oslo_concurrency.lockutils [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] Acquiring lock "refresh_cache-2d853a6e-4c2f-401e-9088-54e82bec1150" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.511344] env[61440]: DEBUG oslo_concurrency.lockutils [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] Acquired lock "refresh_cache-2d853a6e-4c2f-401e-9088-54e82bec1150" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.511884] env[61440]: DEBUG nova.network.neutron [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Refreshing network info cache for port 1c7ee987-0ff0-45bd-9488-d9e726e1a203 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 605.912600] env[61440]: DEBUG nova.network.neutron [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Updated VIF entry in instance network info cache for port 1c7ee987-0ff0-45bd-9488-d9e726e1a203. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 605.913014] env[61440]: DEBUG nova.network.neutron [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Updating instance_info_cache with network_info: [{"id": "1c7ee987-0ff0-45bd-9488-d9e726e1a203", "address": "fa:16:3e:7a:56:54", "network": {"id": "89f2c4aa-c166-4ad4-af3c-44f02a3add5f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4dd68985b0414373a45d24938e7b9731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c7ee987-0f", "ovs_interfaceid": "1c7ee987-0ff0-45bd-9488-d9e726e1a203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.926178] env[61440]: DEBUG oslo_concurrency.lockutils [req-84e1e78f-8c4f-44ff-b7e4-543bf4349c87 req-8d812f49-aa60-4ec1-bc53-d675a607033a service nova] Releasing lock "refresh_cache-2d853a6e-4c2f-401e-9088-54e82bec1150" {{(pid=61440) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.808855] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e60257bf-cdf4-4972-8f30-67ecfdeb3ac7 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] Acquiring lock "61c5fe8a-8fab-4fca-b03e-b583b2566162" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.809169] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e60257bf-cdf4-4972-8f30-67ecfdeb3ac7 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] Lock "61c5fe8a-8fab-4fca-b03e-b583b2566162" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.070393] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6e36ab58-1071-49b1-a512-c4f9e3e164ee tempest-ServersListShow296Test-1936214275 tempest-ServersListShow296Test-1936214275-project-member] Acquiring lock "a5eb3371-1899-44cb-ba30-44aa4d54b2ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.070393] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6e36ab58-1071-49b1-a512-c4f9e3e164ee tempest-ServersListShow296Test-1936214275 tempest-ServersListShow296Test-1936214275-project-member] Lock "a5eb3371-1899-44cb-ba30-44aa4d54b2ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.837443] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.870047] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.870047] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.870047] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.870047] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
[ 612.277272] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 612.277272] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 612.277272] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 612.301161] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.301909] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.301909] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.302065] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.302233] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.302500] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.302743] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.302982] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.303237] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.303468] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 612.303626] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 612.304458] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 613.274457] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 613.274756] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 613.275012] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 613.275214] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 613.291762] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 613.291762] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 613.292135] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 613.292135] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 613.294039] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caea4354-e1d6-47ba-85fc-f2df7a9ee28f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 613.308012] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa55777-d4ca-4df7-b1ed-3dd8f4425d18 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 613.326987] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bdd47d-b1e6-4e7e-b9ef-87f65bf7465a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 613.339020] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cad234-3186-4514-87d7-dc71b98ecd2c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 613.368308] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180651MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 613.370787] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 613.370787] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 613.476713] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 33c87cb0-cd99-4c35-bcfa-899256be0460 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.476713] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 85c2cfe1-443a-4373-bdba-b2a957a8681b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.476713] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.476713] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f59b0b04-643c-497c-90a0-a7f885c1eb3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.476953] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 94b6a93d-de4d-4600-94af-81dce16b22f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.476953] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.477082] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.477179] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.477292] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.477402] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 613.513804] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.550445] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.570023] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.610056] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 766a9405-8a7d-4876-8569-964d2e73fedb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.628480] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.659054] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d57a929-eb61-471d-b0a1-d1e366201ccc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.674104] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.694155] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0a80de0b-a914-443b-be18-a13b0eda231e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.709813] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6a7a2d1-a50d-478c-9c27-fe58504fa14b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.725216] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a53e12-741a-4104-91d1-8d41f2b490ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.738923] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20a0ddf2-83bc-4e56-8208-12bb200c26e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.751832] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e4395e44-91f2-4f9b-a902-12859618f9cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.767575] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 276e62bf-fd35-47ed-b422-b45fb4a89ed2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.784577] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.797724] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3ca84343-76bb-46f4-89d8-3cc45ac3dc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.811136] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c737971c-735e-4317-b0e6-eb73bfc8456a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.827463] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 61c5fe8a-8fab-4fca-b03e-b583b2566162 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.838635] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a5eb3371-1899-44cb-ba30-44aa4d54b2ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 613.838892] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 613.839649] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 614.364309] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0381a75-a46f-4b65-9896-bd24e142e508 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 614.374266] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b358ef41-6b7c-4028-9a3c-2c91bb06a229 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 614.411199] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b414096-61d0-454b-839e-5a8a83e89c33 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 614.419517] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7698db4-b088-4464-b831-f077e87acb00 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 614.436913] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 614.446454] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 614.466140] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 614.466140] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.096s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 616.630703] env[61440]: DEBUG oslo_concurrency.lockutils [None req-089f0975-389d-4a4d-9a66-eaaf638dacc0 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "697179bb-2391-4434-8144-ee917aa84441" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 616.631069] env[61440]: DEBUG oslo_concurrency.lockutils [None req-089f0975-389d-4a4d-9a66-eaaf638dacc0 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "697179bb-2391-4434-8144-ee917aa84441" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 617.216422] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a05aa013-816e-4894-ba46-8a3babce2e95 tempest-ServersTestJSON-1279785354 tempest-ServersTestJSON-1279785354-project-member] Acquiring lock "d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 617.216647] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a05aa013-816e-4894-ba46-8a3babce2e95 tempest-ServersTestJSON-1279785354 tempest-ServersTestJSON-1279785354-project-member] Lock "d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 627.168018] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1544c9f0-d684-41d6-b3e8-7a0183e9daa8 tempest-ServersTestManualDisk-1353772146 tempest-ServersTestManualDisk-1353772146-project-member] Acquiring lock "a91235be-0b5f-4b2a-8da3-569ec393305a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
"a91235be-0b5f-4b2a-8da3-569ec393305a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.168018] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1544c9f0-d684-41d6-b3e8-7a0183e9daa8 tempest-ServersTestManualDisk-1353772146 tempest-ServersTestManualDisk-1353772146-project-member] Lock "a91235be-0b5f-4b2a-8da3-569ec393305a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.139107] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5ffe4c69-5518-43d6-8330-826943160e12 tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] Acquiring lock "f0ad826d-58d7-4a52-8767-4609170d964d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.139369] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5ffe4c69-5518-43d6-8330-826943160e12 tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] Lock "f0ad826d-58d7-4a52-8767-4609170d964d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.308685] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e5ab2e1-51a6-496e-9728-cdebe0110fef tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] Acquiring lock "d8cc1718-c721-478b-807a-d6ae1eb09c7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.308685] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e5ab2e1-51a6-496e-9728-cdebe0110fef tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] Lock "d8cc1718-c721-478b-807a-d6ae1eb09c7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.918082] env[61440]: WARNING oslo_vmware.rw_handles [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 643.918082] env[61440]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 643.918082] env[61440]: ERROR oslo_vmware.rw_handles [ 643.918082] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 643.919945] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 643.920232] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Copying Virtual Disk [datastore2] vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/6309efaa-bf73-4bc8-b99e-71607424796b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 643.920534] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5a3cd03-d90e-414c-b465-23ee8ba15d6e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.928938] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Waiting for the task: (returnval){ [ 643.928938] env[61440]: value = "task-4281244" [ 643.928938] env[61440]: _type = "Task" [ 643.928938] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.937424] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Task: {'id': task-4281244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.439100] env[61440]: DEBUG oslo_vmware.exceptions [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Fault InvalidArgument not matched. 
[ 644.439431] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 644.439989] env[61440]: ERROR nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 644.439989] env[61440]: Faults: ['InvalidArgument']
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Traceback (most recent call last):
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] yield resources
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self.driver.spawn(context, instance, image_meta,
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self._fetch_image_if_missing(context, vi)
[ 644.439989] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] image_cache(vi, tmp_image_ds_loc)
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] vm_util.copy_virtual_disk(
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] session._wait_for_task(vmdk_copy_task)
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] return self.wait_for_task(task_ref)
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] return evt.wait()
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] result = hub.switch()
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 644.440328] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] return self.greenlet.switch()
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self.f(*self.args, **self.kw)
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] raise exceptions.translate_fault(task_info.error)
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Faults: ['InvalidArgument']
[ 644.440657] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460]
[ 644.440657] env[61440]: INFO nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Terminating instance
[ 644.441928] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 644.442153] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 644.442393] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f67c7e4-75a5-4906-bc4b-afa581c53857 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.446028] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 644.446385] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 644.446959] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45eed36-d97f-47d5-97e6-d96407557e2c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.453885] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 644.454143] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd10d3fa-5fe9-4d36-b99c-2524073bb776 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.456435] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 644.456612] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 644.457569] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a450b110-1968-4b74-b93b-a02c1bd5bc51 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.466170] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Waiting for the task: (returnval){
[ 644.466170] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524d5bcf-307c-44a7-9fec-eea321e40af9"
[ 644.466170] env[61440]: _type = "Task"
[ 644.466170] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 644.476390] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 644.476634] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Creating directory with path [datastore2] vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 644.476849] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4c61162-dd09-4942-b150-2a799307a4a7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.497952] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Created directory with path [datastore2] vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 644.498186] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Fetch image to [datastore2] vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 644.498373] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 644.499144] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1756ea74-fa40-403f-8143-d145b5d9c41b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.505675] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8935fdda-e6d6-45a4-9aa0-553e7ea4d1a7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.514709] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247bd074-8334-42ed-9ab8-439a4dd5c76c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.546079] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270cb5e1-ebec-49f1-a896-c1887f59c991 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.548727] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 644.548931] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 644.549122] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Deleting the datastore file [datastore2] 33c87cb0-cd99-4c35-bcfa-899256be0460 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 644.549360] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25e1c1c2-f9ec-4064-8f1b-ce0af73dc9bd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.554022] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1659bf6a-3a54-4aee-a4c1-2fad83c4f009 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.556896] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Waiting for the task: (returnval){
[ 644.556896] env[61440]: value = "task-4281246"
[ 644.556896] env[61440]: _type = "Task"
[ 644.556896] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 644.563923] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Task: {'id': task-4281246, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 644.575540] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 644.624470] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 644.682135] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 644.682386] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 645.069548] env[61440]: DEBUG oslo_vmware.api [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Task: {'id': task-4281246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068146} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 645.069810] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 645.069994] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 645.070183] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 645.070360] env[61440]: INFO nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Took 0.62 seconds to destroy the instance on the hypervisor.
[ 645.072490] env[61440]: DEBUG nova.compute.claims [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 645.072668] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 645.072925] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 645.484215] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ecd373-0bab-406e-a7e8-3a84dc601d77 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.491547] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90e98b3-90e9-488a-84e5-6d187ce39011 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.520307] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254b59a6-af48-4c81-a7c5-d7cf4f137ac1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.527534] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0b599a-468a-42e6-9f62-140dfc43fc85 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.540134] env[61440]: DEBUG nova.compute.provider_tree [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 645.548592] env[61440]: DEBUG nova.scheduler.client.report [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 645.564889] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.492s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 645.565171] env[61440]: ERROR nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 645.565171] env[61440]: Faults: ['InvalidArgument']
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Traceback (most recent call last):
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self.driver.spawn(context, instance, image_meta,
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self._fetch_image_if_missing(context, vi)
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] image_cache(vi, tmp_image_ds_loc)
[ 645.565171] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] vm_util.copy_virtual_disk(
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] session._wait_for_task(vmdk_copy_task)
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] return self.wait_for_task(task_ref)
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] return evt.wait()
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] result = hub.switch()
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] return self.greenlet.switch()
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 645.565479] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] self.f(*self.args, **self.kw)
[ 645.565740] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 645.565740] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] raise exceptions.translate_fault(task_info.error)
[ 645.565740] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 645.565740] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Faults: ['InvalidArgument']
[ 645.565740] env[61440]: ERROR nova.compute.manager [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460]
[ 645.566081] env[61440]: DEBUG nova.compute.utils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 645.567560] env[61440]: DEBUG nova.compute.manager [None
req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Build of instance 33c87cb0-cd99-4c35-bcfa-899256be0460 was re-scheduled: A specified parameter was not correct: fileType [ 645.567560] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 645.567928] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 645.568130] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 645.568306] env[61440]: DEBUG nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 645.568471] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 645.963745] env[61440]: DEBUG nova.network.neutron [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.973417] env[61440]: INFO nova.compute.manager [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] Took 0.40 seconds to deallocate network for instance.
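
The traceback above shows how this fault surfaces: copy_virtual_disk submits a CopyVirtualDisk_Task and oslo_vmware's wait_for_task/_poll_task loop polls vCenter until the task reaches a terminal state, translating an error result into the VimFaultException ("A specified parameter was not correct: fileType", faults=['InvalidArgument']) that _build_and_run_instance catches and reschedules on. Below is a minimal sketch of that poll-and-translate pattern, not oslo_vmware's actual implementation; TaskInfo and the state strings are simplified stand-ins.

import time
from dataclasses import dataclass, field

class VimFaultException(Exception):
    # Simplified stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

@dataclass
class TaskInfo:
    # Only the fields the poller needs; the real vSphere TaskInfo carries more.
    state: str                    # 'queued' | 'running' | 'success' | 'error'
    error_message: str = ''
    faults: list = field(default_factory=list)

def wait_for_task(poll, interval=0.5):
    # Poll `poll()` (a callable returning TaskInfo) until the task finishes;
    # on error, raise, the way _poll_task raises
    # exceptions.translate_fault(task_info.error) in the traceback above.
    while True:
        info = poll()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise VimFaultException(info.faults, info.error_message)
        time.sleep(interval)

# A task that fails the same way CopyVirtualDisk_Task did above:
states = iter([
    TaskInfo('running'),
    TaskInfo('error',
             error_message='A specified parameter was not correct: fileType',
             faults=['InvalidArgument']),
])
try:
    wait_for_task(lambda: next(states), interval=0.0)
except VimFaultException as exc:
    print(exc, exc.fault_list)
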
[ 646.073008] env[61440]: INFO nova.scheduler.client.report [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Deleted allocations for instance 33c87cb0-cd99-4c35-bcfa-899256be0460 [ 646.101902] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59ece803-6a97-481a-ab74-d646e3ed5023 tempest-TenantUsagesTestJSON-481795896 tempest-TenantUsagesTestJSON-481795896-project-member] Lock "33c87cb0-cd99-4c35-bcfa-899256be0460" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 108.354s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.103097] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "33c87cb0-cd99-4c35-bcfa-899256be0460" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 100.488s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.103305] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 33c87cb0-cd99-4c35-bcfa-899256be0460] During sync_power_state the instance has a pending task (spawning). Skip. [ 646.103527] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "33c87cb0-cd99-4c35-bcfa-899256be0460" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.120225] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 646.169256] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.169512] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.170989] env[61440]: INFO nova.compute.claims [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.627379] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3209e1a-e92b-4369-bf11-c6356ca7e307 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.636048] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3036401b-8f4e-4275-b709-abfb1a0d234c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.677601] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c71a40a-2ded-4d56-9dc7-8b7ade4726f9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.685388] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d185a2d-7d23-4a63-b72e-273af3fe8e40 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.698499] env[61440]: DEBUG nova.compute.provider_tree [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.708393] env[61440]: DEBUG nova.scheduler.client.report [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.726541] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.557s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.726963] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 646.762912] env[61440]: DEBUG nova.compute.utils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.764203] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 646.764464] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.777571] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 646.843215] env[61440]: DEBUG nova.policy [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6c5e555721d40c6875c93fa6dad1434', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db06e739a2d34a9ebc3fbd6bd14c3ca5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 646.853038] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 646.884789] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=<?>,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T01:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.885347] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.885706] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.885999] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.886275] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.886527] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.886836] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.887111] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.887383] env[61440]: DEBUG nova.virt.hardware [None 
req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.887708] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.888023] env[61440]: DEBUG nova.virt.hardware [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.889242] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec5edab-abe1-4449-9b24-e1a097fc2d72 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.897734] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da557c94-ca37-4e30-936e-2331a80e65d3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.314406] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Successfully created port: de4a39d3-fd1d-49c9-b112-3137357e0b8b {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.441746] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Successfully updated port: de4a39d3-fd1d-49c9-b112-3137357e0b8b {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.456131] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "refresh_cache-3395aaef-0db6-4fab-b8a5-79b781129690" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.456280] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired lock "refresh_cache-3395aaef-0db6-4fab-b8a5-79b781129690" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.456485] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 648.504799] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 
tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.587808] env[61440]: DEBUG nova.compute.manager [req-ce988dee-15ac-43b1-afb6-4e3a9091be5e req-22c31d2c-4290-4a2a-af8f-d80f1122110e service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Received event network-vif-plugged-de4a39d3-fd1d-49c9-b112-3137357e0b8b {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 648.587808] env[61440]: DEBUG oslo_concurrency.lockutils [req-ce988dee-15ac-43b1-afb6-4e3a9091be5e req-22c31d2c-4290-4a2a-af8f-d80f1122110e service nova] Acquiring lock "3395aaef-0db6-4fab-b8a5-79b781129690-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.587964] env[61440]: DEBUG oslo_concurrency.lockutils [req-ce988dee-15ac-43b1-afb6-4e3a9091be5e req-22c31d2c-4290-4a2a-af8f-d80f1122110e service nova] Lock "3395aaef-0db6-4fab-b8a5-79b781129690-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.588014] env[61440]: DEBUG oslo_concurrency.lockutils [req-ce988dee-15ac-43b1-afb6-4e3a9091be5e req-22c31d2c-4290-4a2a-af8f-d80f1122110e service nova] Lock "3395aaef-0db6-4fab-b8a5-79b781129690-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.588198] env[61440]: DEBUG nova.compute.manager [req-ce988dee-15ac-43b1-afb6-4e3a9091be5e req-22c31d2c-4290-4a2a-af8f-d80f1122110e service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] No waiting events found dispatching network-vif-plugged-de4a39d3-fd1d-49c9-b112-3137357e0b8b {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 648.588364] env[61440]: WARNING nova.compute.manager [req-ce988dee-15ac-43b1-afb6-4e3a9091be5e req-22c31d2c-4290-4a2a-af8f-d80f1122110e service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Received unexpected event network-vif-plugged-de4a39d3-fd1d-49c9-b112-3137357e0b8b for instance with vm_state building and task_state spawning. 
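
The "No waiting events found" / "Received unexpected event" pair above comes from Nova's external-event plumbing: spawn registers the events it expects (here network-vif-plugged-<port_id>), and the handler for Neutron's callback pops a matching waiter under the per-instance "<uuid>-events" lock, warning when nothing is registered yet, as happened here because the port became active before spawn started waiting. A minimal sketch of that pop-or-warn pattern follows, with hypothetical names rather than Nova's actual internals.

import threading
import logging

LOG = logging.getLogger(__name__)

class InstanceEvents:
    # Illustrative registry behind pop_instance_event, not Nova's real class:
    # spawn code registers the events it expects; the external event handler
    # pops a waiter and signals it, or logs the warning seen above.
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # instance uuid -> {event name -> Event}

    def expect(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)
            return
        waiter.set()

events = InstanceEvents()
# Nothing called expect(), so this takes the warning path, matching the log:
events.dispatch('3395aaef-0db6-4fab-b8a5-79b781129690',
                'network-vif-plugged-de4a39d3-fd1d-49c9-b112-3137357e0b8b')
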
[ 648.861929] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Updating instance_info_cache with network_info: [{"id": "de4a39d3-fd1d-49c9-b112-3137357e0b8b", "address": "fa:16:3e:f8:a9:c2", "network": {"id": "73748327-c796-4a9e-a583-c7e8ce60c1cf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-164031856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db06e739a2d34a9ebc3fbd6bd14c3ca5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4a39d3-fd", "ovs_interfaceid": "de4a39d3-fd1d-49c9-b112-3137357e0b8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.873904] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Releasing lock "refresh_cache-3395aaef-0db6-4fab-b8a5-79b781129690" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.874199] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance network_info: |[{"id": "de4a39d3-fd1d-49c9-b112-3137357e0b8b", "address": "fa:16:3e:f8:a9:c2", "network": {"id": "73748327-c796-4a9e-a583-c7e8ce60c1cf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-164031856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db06e739a2d34a9ebc3fbd6bd14c3ca5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4a39d3-fd", "ovs_interfaceid": "de4a39d3-fd1d-49c9-b112-3137357e0b8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.874601] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:a9:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de4a39d3-fd1d-49c9-b112-3137357e0b8b', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 648.882551] env[61440]: DEBUG oslo.service.loopingcall [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.883155] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 648.883389] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10d364bf-21ad-4160-b705-5c91f66651bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.904769] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 648.904769] env[61440]: value = "task-4281247" [ 648.904769] env[61440]: _type = "Task" [ 648.904769] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.914273] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281247, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.416144] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281247, 'name': CreateVM_Task, 'duration_secs': 0.293461} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.416423] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 649.417277] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.417615] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.418049] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 649.418438] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fff0da90-e58b-4faf-a197-9d1797502479 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.425534] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 649.425534] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5251f325-d799-1e7f-a1c8-c732be8f0373" [ 649.425534] env[61440]: _type = "Task" [ 649.425534] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.432311] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5251f325-d799-1e7f-a1c8-c732be8f0373, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.935248] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.935248] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 649.935248] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.780475] env[61440]: DEBUG nova.compute.manager [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Received event network-changed-de4a39d3-fd1d-49c9-b112-3137357e0b8b {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 650.780719] env[61440]: DEBUG nova.compute.manager [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Refreshing instance network info cache due to event network-changed-de4a39d3-fd1d-49c9-b112-3137357e0b8b. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 650.780933] env[61440]: DEBUG oslo_concurrency.lockutils [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] Acquiring lock "refresh_cache-3395aaef-0db6-4fab-b8a5-79b781129690" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.781629] env[61440]: DEBUG oslo_concurrency.lockutils [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] Acquired lock "refresh_cache-3395aaef-0db6-4fab-b8a5-79b781129690" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.781870] env[61440]: DEBUG nova.network.neutron [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Refreshing network info cache for port de4a39d3-fd1d-49c9-b112-3137357e0b8b {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 651.558207] env[61440]: DEBUG nova.network.neutron [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Updated VIF entry in instance network info cache for port de4a39d3-fd1d-49c9-b112-3137357e0b8b. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 651.558723] env[61440]: DEBUG nova.network.neutron [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Updating instance_info_cache with network_info: [{"id": "de4a39d3-fd1d-49c9-b112-3137357e0b8b", "address": "fa:16:3e:f8:a9:c2", "network": {"id": "73748327-c796-4a9e-a583-c7e8ce60c1cf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-164031856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db06e739a2d34a9ebc3fbd6bd14c3ca5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4a39d3-fd", "ovs_interfaceid": "de4a39d3-fd1d-49c9-b112-3137357e0b8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.570961] env[61440]: DEBUG oslo_concurrency.lockutils [req-4d333726-ac9d-4462-abf4-d28851421f47 req-b69ec9b8-0368-4345-9873-c5dc1e9bb3b8 service nova] Releasing lock "refresh_cache-3395aaef-0db6-4fab-b8a5-79b781129690" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.970096] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "23b7562f-035c-487f-a1f2-279b69ca4355" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.970367] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "23b7562f-035c-487f-a1f2-279b69ca4355" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.465112] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.465112] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 673.465112] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of 
instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 673.485500] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.485665] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.485799] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.485926] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486063] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486191] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486311] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486430] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486549] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486666] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 673.486785] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 673.487286] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.487519] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.487687] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.487838] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.487980] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.488136] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.488265] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 674.293973] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.274353] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.286301] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.286445] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.286622] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.286840] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 675.288122] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf43ef7-b34c-474c-855f-1de98693c9c7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.298040] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9736f8-1e91-41c8-80d1-b7ad0668108c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.312485] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3927ac-4c12-4af6-90f3-9e929f7d30d5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.319389] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54218e59-ec4e-4280-99b1-5ab7a62edd6b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.349519] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180681MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 675.349676] env[61440]: DEBUG 
oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.349871] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.424903] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 85c2cfe1-443a-4373-bdba-b2a957a8681b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.424903] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425050] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f59b0b04-643c-497c-90a0-a7f885c1eb3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425094] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 94b6a93d-de4d-4600-94af-81dce16b22f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425220] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425339] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425459] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425627] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425701] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.425814] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 675.437719] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.448274] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.458553] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 766a9405-8a7d-4876-8569-964d2e73fedb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.469027] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.479393] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d57a929-eb61-471d-b0a1-d1e366201ccc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.489484] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.498722] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0a80de0b-a914-443b-be18-a13b0eda231e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.508784] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6a7a2d1-a50d-478c-9c27-fe58504fa14b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.517961] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a53e12-741a-4104-91d1-8d41f2b490ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.527111] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20a0ddf2-83bc-4e56-8208-12bb200c26e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.536842] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e4395e44-91f2-4f9b-a902-12859618f9cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.546048] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 276e62bf-fd35-47ed-b422-b45fb4a89ed2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.555608] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.564487] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3ca84343-76bb-46f4-89d8-3cc45ac3dc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.574226] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c737971c-735e-4317-b0e6-eb73bfc8456a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.583163] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 61c5fe8a-8fab-4fca-b03e-b583b2566162 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.592512] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a5eb3371-1899-44cb-ba30-44aa4d54b2ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.602084] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 697179bb-2391-4434-8144-ee917aa84441 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.610518] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.619689] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a91235be-0b5f-4b2a-8da3-569ec393305a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.629274] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f0ad826d-58d7-4a52-8767-4609170d964d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.639032] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d8cc1718-c721-478b-807a-d6ae1eb09c7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.648713] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.648963] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 675.649127] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 676.036758] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee09ab69-af91-4e2a-810e-36190dfcf322 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.044742] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b041d0cb-03fd-4e6f-80c9-a76ce371388e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.074292] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427fa124-bdd7-4226-a723-0b1af8da2cfc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.081532] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8456e57a-9603-430c-baf7-72a6d7e327e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.095033] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.103147] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.118207] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 676.118634] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.768s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.120094] env[61440]: WARNING oslo_vmware.rw_handles [None 
req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 693.120094] env[61440]: ERROR oslo_vmware.rw_handles [ 693.120716] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 693.121642] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 693.121904] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Copying Virtual Disk [datastore2] vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/baad0511-04c5-4068-9f8c-f9b85788bdb4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 693.122238] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b690426-63ac-478e-914e-dc54c6ec9044 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.130313] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Waiting for the task: 
(returnval){ [ 693.130313] env[61440]: value = "task-4281248" [ 693.130313] env[61440]: _type = "Task" [ 693.130313] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.138118] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Task: {'id': task-4281248, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.641213] env[61440]: DEBUG oslo_vmware.exceptions [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 693.641511] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.642812] env[61440]: ERROR nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 693.642812] env[61440]: Faults: ['InvalidArgument'] [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Traceback (most recent call last): [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] yield resources [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self.driver.spawn(context, instance, image_meta, [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self._fetch_image_if_missing(context, vi) [ 693.642812] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
639, in _fetch_image_if_missing [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] image_cache(vi, tmp_image_ds_loc) [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] vm_util.copy_virtual_disk( [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] session._wait_for_task(vmdk_copy_task) [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] return self.wait_for_task(task_ref) [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] return evt.wait() [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] result = hub.switch() [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 693.643167] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] return self.greenlet.switch() [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self.f(*self.args, **self.kw) [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] raise exceptions.translate_fault(task_info.error) [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Faults: ['InvalidArgument'] [ 693.643533] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] [ 693.643533] env[61440]: INFO nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Terminating instance [ 
693.646506] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.646506] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.646506] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72e3180f-46d5-4e89-a825-0cbf875ea3a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.650233] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 693.650233] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 693.650421] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997cbe6d-2a67-46f2-ad66-2158466b2050 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.655603] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.655811] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 693.657247] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-890191cb-185e-487f-b0f7-2aa4cbcd4178 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.661356] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 693.661848] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-322e6eec-1e73-42e4-9db5-eb676edf3ec1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.664167] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Waiting for the task: (returnval){ [ 693.664167] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52197371-817e-c637-4dba-d96825238094" [ 693.664167] env[61440]: _type = "Task" [ 693.664167] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.675953] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52197371-817e-c637-4dba-d96825238094, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.733370] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 693.733584] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 693.733761] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Deleting the datastore file [datastore2] 85c2cfe1-443a-4373-bdba-b2a957a8681b {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 693.734066] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42ed10e2-aa44-4943-9e3e-78013cfdb585 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.739916] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Waiting for the task: (returnval){ [ 693.739916] env[61440]: value = "task-4281250" [ 693.739916] env[61440]: _type = "Task" [ 693.739916] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.747746] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Task: {'id': task-4281250, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.174710] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 694.175017] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Creating directory with path [datastore2] vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.175210] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0157b626-f154-48af-a650-6cd1cba355c2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.186409] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Created directory with path [datastore2] vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.186594] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Fetch image to [datastore2] vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 694.186760] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 694.187516] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2aa9111-a905-4bfc-bb56-afda404a5990 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.193975] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adee2e38-afe8-438d-a90b-b5a827fb11e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.203087] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11b28c1-3edf-4cb9-a9f8-e8d958b28ad4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.232816] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54165e5e-2d9d-43b1-bfd2-0f5dc93caac8 {{(pid=61440) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.238351] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cdc4965c-339f-4143-8c3d-5ad6cbd4d407 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.247267] env[61440]: DEBUG oslo_vmware.api [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Task: {'id': task-4281250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06917} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.247503] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.247705] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 694.247899] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 694.248088] env[61440]: INFO nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Took 0.60 seconds to destroy the instance on the hypervisor. 
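The sequence above — CopyVirtualDisk_Task raising InvalidArgument, then UnregisterVM and DeleteDatastoreFile_Task, each tracked by "Waiting for the task … progress is N%" entries — is the standard oslo.vmware task-polling pattern: any vCenter method ending in _Task returns a Task managed object immediately, and the session polls it until it succeeds or raises a translated fault. A minimal sketch of that pattern follows; the vCenter host, credentials, and datastore paths are placeholders, not values from this log:

```python
# Minimal sketch of the oslo.vmware task-polling pattern seen in the log.
# Host, credentials, and datastore paths are placeholders.
from oslo_vmware import api, exceptions

session = api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
# CopyVirtualDisk_Task returns a Task managed-object reference right away.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
    destName='[datastore2] vmware_temp/example/example.vmdk')
try:
    # wait_for_task() polls the task state (the "progress is N%" DEBUG
    # lines) and, on failure, raises a translated fault such as the
    # VimFaultException with Faults: ['InvalidArgument'] logged above.
    session.wait_for_task(task)
except exceptions.VimFaultException as exc:
    print('disk copy failed:', exc.fault_list, exc)
```

Nova's vm_util.copy_virtual_disk() wraps exactly this call pair, which is why the traceback above bottoms out in session._wait_for_task(vmdk_copy_task) and oslo_vmware/api.py's _poll_task.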
[ 694.250148] env[61440]: DEBUG nova.compute.claims [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.250317] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.250525] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.257591] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 694.313309] env[61440]: DEBUG oslo_vmware.rw_handles [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 694.375469] env[61440]: DEBUG oslo_vmware.rw_handles [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 694.375469] env[61440]: DEBUG oslo_vmware.rw_handles [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 694.757567] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27de823-e19b-4c55-9a5e-357670a1800a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.766327] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09d456b-4c89-4bef-9439-feec12605464 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.796061] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a373bc9-7e0e-4f10-8410-48b599ef8a05 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.803129] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb55ab7-9c06-496b-8ecf-31b49e71ba9b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.815844] env[61440]: DEBUG nova.compute.provider_tree [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.826410] env[61440]: DEBUG nova.scheduler.client.report [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 694.841562] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.591s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.842149] env[61440]: ERROR nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 694.842149] env[61440]: Faults: ['InvalidArgument'] [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Traceback (most recent call last): [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] 
File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self.driver.spawn(context, instance, image_meta, [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self._fetch_image_if_missing(context, vi) [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] image_cache(vi, tmp_image_ds_loc) [ 694.842149] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] vm_util.copy_virtual_disk( [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] session._wait_for_task(vmdk_copy_task) [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] return self.wait_for_task(task_ref) [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] return evt.wait() [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] result = hub.switch() [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] return self.greenlet.switch() [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 694.842433] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] self.f(*self.args, **self.kw) [ 694.842740] env[61440]: ERROR 
nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 694.842740] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] raise exceptions.translate_fault(task_info.error) [ 694.842740] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 694.842740] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Faults: ['InvalidArgument'] [ 694.842740] env[61440]: ERROR nova.compute.manager [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] [ 694.842855] env[61440]: DEBUG nova.compute.utils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 694.844432] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Build of instance 85c2cfe1-443a-4373-bdba-b2a957a8681b was re-scheduled: A specified parameter was not correct: fileType [ 694.844432] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 694.844819] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 694.844993] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 694.845207] env[61440]: DEBUG nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 694.845343] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 695.222305] env[61440]: DEBUG nova.network.neutron [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.235618] env[61440]: INFO nova.compute.manager [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] Took 0.39 seconds to deallocate network for instance. [ 695.377630] env[61440]: INFO nova.scheduler.client.report [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Deleted allocations for instance 85c2cfe1-443a-4373-bdba-b2a957a8681b [ 695.404401] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f6db9c57-8541-44e7-acb5-9979428123f9 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732 tempest-FloatingIPsAssociationNegativeTestJSON-1448884732-project-member] Lock "85c2cfe1-443a-4373-bdba-b2a957a8681b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.168s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.405610] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "85c2cfe1-443a-4373-bdba-b2a957a8681b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 149.791s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.405806] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 85c2cfe1-443a-4373-bdba-b2a957a8681b] During sync_power_state the instance has a pending task (spawning). Skip. 
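For reference, the "Inventory has not changed" reports that recur throughout these entries carry the same inventory dict each cycle, and placement derives usable capacity from it as (total - reserved) * allocation_ratio per resource class — which is why a node reporting only 48 physical vCPUs comfortably holds the 10 allocated ones with room to spare. A small illustrative calculation using the dict copied from the log (the helper function is ours, not Nova code):

```python
# Illustrative only: effective placement capacity from the inventory dict
# logged by nova.scheduler.client.report above. The helper is hypothetical;
# the formula matches placement's (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 329}
```

The min_unit, max_unit, and step_size fields in the same dict constrain the size of any single allocation (max_unit 16 caps one instance at 16 VCPUs here) rather than total capacity.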
[ 695.405978] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "85c2cfe1-443a-4373-bdba-b2a957a8681b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.421874] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 695.485528] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.485898] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.487299] env[61440]: INFO nova.compute.claims [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.920798] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfeee562-fcde-4403-b52f-c28703019224 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.929065] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1449cb-24fc-4a8e-8828-c66fe3b9eab2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.958408] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1d7d95-eaec-4554-b1bd-684b264640fc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.965635] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeb5dff-6f38-46bd-83a3-a72ed963641c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.978684] env[61440]: DEBUG nova.compute.provider_tree [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.989034] env[61440]: DEBUG nova.scheduler.client.report [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 
tempest-AttachInterfacesV270Test-403380279-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.008481] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.523s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.009033] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 696.043981] env[61440]: DEBUG nova.compute.utils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.045421] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 696.045592] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 696.054552] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 696.121896] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 696.125394] env[61440]: DEBUG nova.policy [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed570ed919df49c8aef36cd9734c27bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44c73ba2f5de42c487100aa2a674976a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 696.149551] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 696.149825] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 696.149985] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.150185] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 696.150332] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.150475] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 696.150678] env[61440]: DEBUG 
nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 696.150836] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 696.151006] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 696.151176] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 696.151347] env[61440]: DEBUG nova.virt.hardware [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 696.152218] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69093bef-a56d-4214-989d-b1fbc61e706e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.160089] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62efc4c-4d5c-421d-b1c1-5fe2ca5d827d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.491770] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Successfully created port: ec63fc76-bf06-4cda-aa0b-4181e5907755 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.336648] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Successfully updated port: ec63fc76-bf06-4cda-aa0b-4181e5907755 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.361316] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "refresh_cache-1438771e-fd84-4dac-81b1-c2df19972ebe" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.361469] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 
tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquired lock "refresh_cache-1438771e-fd84-4dac-81b1-c2df19972ebe" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.361621] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.432216] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.693569] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Updating instance_info_cache with network_info: [{"id": "ec63fc76-bf06-4cda-aa0b-4181e5907755", "address": "fa:16:3e:e6:b1:74", "network": {"id": "eee2f445-2d8b-4030-9026-1d16f3a579ec", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1316009416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44c73ba2f5de42c487100aa2a674976a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7806fe18-2b89-4386-87b1-f22876f82af2", "external-id": "nsx-vlan-transportzone-727", "segmentation_id": 727, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec63fc76-bf", "ovs_interfaceid": "ec63fc76-bf06-4cda-aa0b-4181e5907755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.707134] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Releasing lock "refresh_cache-1438771e-fd84-4dac-81b1-c2df19972ebe" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.707432] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance network_info: |[{"id": "ec63fc76-bf06-4cda-aa0b-4181e5907755", "address": "fa:16:3e:e6:b1:74", "network": {"id": "eee2f445-2d8b-4030-9026-1d16f3a579ec", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1316009416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44c73ba2f5de42c487100aa2a674976a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7806fe18-2b89-4386-87b1-f22876f82af2", "external-id": "nsx-vlan-transportzone-727", "segmentation_id": 727, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec63fc76-bf", "ovs_interfaceid": "ec63fc76-bf06-4cda-aa0b-4181e5907755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 697.707972] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:b1:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7806fe18-2b89-4386-87b1-f22876f82af2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec63fc76-bf06-4cda-aa0b-4181e5907755', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 697.717466] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Creating folder: Project (44c73ba2f5de42c487100aa2a674976a). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 697.718433] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c247441f-a054-481b-b130-a8b50cbee8a5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.730456] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Created folder: Project (44c73ba2f5de42c487100aa2a674976a) in parent group-v843372. [ 697.730658] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Creating folder: Instances. Parent ref: group-v843411. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 697.731216] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70e967a3-5747-4efa-abf8-f85509695ee2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.740586] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Created folder: Instances in parent group-v843411. 
[ 697.740586] env[61440]: DEBUG oslo.service.loopingcall [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.740742] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 697.740942] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e621c45-4e76-4d24-93cc-47395b5861e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.761177] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 697.761177] env[61440]: value = "task-4281253" [ 697.761177] env[61440]: _type = "Task" [ 697.761177] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.766110] env[61440]: DEBUG nova.compute.manager [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Received event network-vif-plugged-ec63fc76-bf06-4cda-aa0b-4181e5907755 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 697.766327] env[61440]: DEBUG oslo_concurrency.lockutils [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] Acquiring lock "1438771e-fd84-4dac-81b1-c2df19972ebe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.766530] env[61440]: DEBUG oslo_concurrency.lockutils [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.766767] env[61440]: DEBUG oslo_concurrency.lockutils [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.766980] env[61440]: DEBUG nova.compute.manager [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] No waiting events found dispatching network-vif-plugged-ec63fc76-bf06-4cda-aa0b-4181e5907755 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 697.767182] env[61440]: WARNING nova.compute.manager [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Received unexpected event network-vif-plugged-ec63fc76-bf06-4cda-aa0b-4181e5907755 for instance with vm_state building and task_state spawning.
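The records just above capture the expected-external-event handshake end to end: CreateVM_Task starts, Neutron reports network-vif-plugged for port ec63fc76-bf06-4cda-aa0b-4181e5907755, and pop_instance_event finds no registered waiter, so the event is logged as unexpected, which is harmless here because the VIF simply plugged before the driver began waiting for it. A minimal sketch of such a waiter registry follows; the class and method names are illustrative, not Nova's implementation.

    # Sketch of an expected-external-event registry: waiters are keyed by
    # (instance uuid, event name); an event that arrives before anyone
    # registered for it is reported as unexpected, as in the WARNING above.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> Event

        def prepare(self, instance_uuid, event_name):
            """Register interest before kicking off the external action."""
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev  # caller blocks on ev.wait(timeout)

        def pop(self, instance_uuid, event_name):
            """Dispatch an event reported by the external service."""
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print('unexpected event %s for %s' % (event_name, instance_uuid))
            else:
                ev.set()

    events = InstanceEvents()
    # The event lands before any waiter was registered -> "unexpected".
    events.pop('1438771e-fd84-4dac-81b1-c2df19972ebe',
               'network-vif-plugged-ec63fc76-bf06-4cda-aa0b-4181e5907755')

Registering the waiter before triggering the action that produces the notification is what keeps the pattern race-free for callers that do wait; an early event, as in this trace, only costs a WARNING.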
[ 697.767347] env[61440]: DEBUG nova.compute.manager [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Received event network-changed-ec63fc76-bf06-4cda-aa0b-4181e5907755 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 697.767499] env[61440]: DEBUG nova.compute.manager [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Refreshing instance network info cache due to event network-changed-ec63fc76-bf06-4cda-aa0b-4181e5907755. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 697.767761] env[61440]: DEBUG oslo_concurrency.lockutils [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] Acquiring lock "refresh_cache-1438771e-fd84-4dac-81b1-c2df19972ebe" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.767837] env[61440]: DEBUG oslo_concurrency.lockutils [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] Acquired lock "refresh_cache-1438771e-fd84-4dac-81b1-c2df19972ebe" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.767997] env[61440]: DEBUG nova.network.neutron [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Refreshing network info cache for port ec63fc76-bf06-4cda-aa0b-4181e5907755 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 697.774492] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281253, 'name': CreateVM_Task} progress is 5%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.142232] env[61440]: DEBUG nova.network.neutron [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Updated VIF entry in instance network info cache for port ec63fc76-bf06-4cda-aa0b-4181e5907755. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 698.142232] env[61440]: DEBUG nova.network.neutron [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Updating instance_info_cache with network_info: [{"id": "ec63fc76-bf06-4cda-aa0b-4181e5907755", "address": "fa:16:3e:e6:b1:74", "network": {"id": "eee2f445-2d8b-4030-9026-1d16f3a579ec", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1316009416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44c73ba2f5de42c487100aa2a674976a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7806fe18-2b89-4386-87b1-f22876f82af2", "external-id": "nsx-vlan-transportzone-727", "segmentation_id": 727, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec63fc76-bf", "ovs_interfaceid": "ec63fc76-bf06-4cda-aa0b-4181e5907755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.154568] env[61440]: DEBUG oslo_concurrency.lockutils [req-36762675-ff9a-479a-b0c0-a9f1dca39e67 req-281c2017-1c04-4e5b-a4fe-1b0f9cf3d7df service nova] Releasing lock "refresh_cache-1438771e-fd84-4dac-81b1-c2df19972ebe" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.272824] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281253, 'name': CreateVM_Task, 'duration_secs': 0.420384} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.273060] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 698.273697] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.273876] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.274207] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 698.274459] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6b062db-9d6d-498d-b98a-2c51d40ac4a9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.279774] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Waiting for the task: (returnval){ [ 698.279774] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524ff3e2-9482-f7a0-b4ae-9399c5e34f09" [ 698.279774] env[61440]: _type = "Task" [ 698.279774] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.292303] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524ff3e2-9482-f7a0-b4ae-9399c5e34f09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.789816] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.790101] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 698.790356] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.291728] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.292105] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.119125] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.119480] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 733.270908] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.296740] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.296891] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 733.296993] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 733.316660] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.316822] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.316952] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317089] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317212] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317402] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317466] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317580] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317707] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317826] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 733.317945] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 733.318407] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.318582] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.318746] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.275131] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.270615] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.274447] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.275054] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.287039] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.287281] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.287451] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.287842] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 737.288893] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b92ea28-a51f-4b50-a546-e3bab9ca89fc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.297965] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144d2fe3-882d-4b4a-ae0f-9ac58dd613e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.312615] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81227bd-6c9c-4ea3-b844-5057a60dc42f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.318737] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfe3e87-d400-4652-9bed-b01d90619b78 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.348866] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180657MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 737.349027] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.349186] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.433319] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b actively managed on this compute host 
and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.433487] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f59b0b04-643c-497c-90a0-a7f885c1eb3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.433616] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 94b6a93d-de4d-4600-94af-81dce16b22f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.433739] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.433859] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.433977] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.434112] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.434261] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.434425] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.434576] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 737.448589] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.459899] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 766a9405-8a7d-4876-8569-964d2e73fedb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.470920] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.481762] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d57a929-eb61-471d-b0a1-d1e366201ccc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.492429] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.503706] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0a80de0b-a914-443b-be18-a13b0eda231e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.514196] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6a7a2d1-a50d-478c-9c27-fe58504fa14b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.523532] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a53e12-741a-4104-91d1-8d41f2b490ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.533904] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20a0ddf2-83bc-4e56-8208-12bb200c26e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.545964] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e4395e44-91f2-4f9b-a902-12859618f9cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.557398] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 276e62bf-fd35-47ed-b422-b45fb4a89ed2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.571205] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.582992] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3ca84343-76bb-46f4-89d8-3cc45ac3dc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.593097] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c737971c-735e-4317-b0e6-eb73bfc8456a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.603901] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 61c5fe8a-8fab-4fca-b03e-b583b2566162 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.623629] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a5eb3371-1899-44cb-ba30-44aa4d54b2ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.638283] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 697179bb-2391-4434-8144-ee917aa84441 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.649640] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.660762] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a91235be-0b5f-4b2a-8da3-569ec393305a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.671783] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f0ad826d-58d7-4a52-8767-4609170d964d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.681269] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d8cc1718-c721-478b-807a-d6ae1eb09c7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.691867] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.701858] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.702131] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 737.702306] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 738.099697] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790cd9da-d412-4cce-bbc9-4ab6891ea508 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.107608] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19acad0b-ce06-4077-a57a-acc66a230b11 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.137716] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b72febc-00a3-496e-b976-cca5242d8d27 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.145422] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77c9ce3-eecd-4d27-ae68-e8a0fe5e49e4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.158520] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 
9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.169849] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.185537] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 738.185732] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.837s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.832378] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.853776] env[61440]: DEBUG oslo_concurrency.lockutils [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.167410] env[61440]: DEBUG oslo_concurrency.lockutils [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "94b6a93d-de4d-4600-94af-81dce16b22f7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.138281] env[61440]: WARNING oslo_vmware.rw_handles [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in
getresponse [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 743.138281] env[61440]: ERROR oslo_vmware.rw_handles [ 743.139145] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 743.141296] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 743.141684] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Copying Virtual Disk [datastore2] vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/71de8433-1757-48c2-892a-87bf0c733eca/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 743.142024] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe5a2847-e965-46ec-9de0-eed138b3b9e3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.151633] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Waiting for the task: (returnval){ [ 743.151633] env[61440]: value = "task-4281254" [ 743.151633] env[61440]: _type = "Task" [ 743.151633] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.159837] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Task: {'id': task-4281254, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.661947] env[61440]: DEBUG oslo_vmware.exceptions [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 743.662263] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.662826] env[61440]: ERROR nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 743.662826] env[61440]: Faults: ['InvalidArgument'] [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Traceback (most recent call last): [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] yield resources [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self.driver.spawn(context, instance, image_meta, [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self._fetch_image_if_missing(context, vi) [ 743.662826] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] image_cache(vi, tmp_image_ds_loc) [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] vm_util.copy_virtual_disk( [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] session._wait_for_task(vmdk_copy_task) [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] return self.wait_for_task(task_ref) [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] return evt.wait() [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] result = hub.switch() [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 743.663162] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] return self.greenlet.switch() [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self.f(*self.args, **self.kw) [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] raise exceptions.translate_fault(task_info.error) [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Faults: ['InvalidArgument'] [ 743.663526] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] [ 743.663526] env[61440]: INFO nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Terminating instance [ 743.664774] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.664993] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.665638] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Start 
destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 743.665835] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 743.666066] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9914376-587e-4f17-a819-83234955bf6e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.668426] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecfa1bd-f8e5-45cd-9cbd-46fc23f0aa2b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.675424] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 743.675674] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b19ccb7a-0d91-4ed2-ba1c-b8bc93040012 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.677916] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.678105] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 743.679122] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7ccdea9-f7b5-485c-ba10-7f03fd881ad9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.683723] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Waiting for the task: (returnval){ [ 743.683723] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a5ed7a-4a2f-58e0-ae4a-9e1115992ebe" [ 743.683723] env[61440]: _type = "Task" [ 743.683723] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.692545] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a5ed7a-4a2f-58e0-ae4a-9e1115992ebe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.741621] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 743.741893] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 743.742121] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Deleting the datastore file [datastore2] b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.742474] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30b9739f-4e14-4b99-8631-9e92ff11576e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.749593] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Waiting for the task: (returnval){ [ 743.749593] env[61440]: value = "task-4281256" [ 743.749593] env[61440]: _type = "Task" [ 743.749593] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.757364] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Task: {'id': task-4281256, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.194355] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 744.194638] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Creating directory with path [datastore2] vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.194878] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79366333-cd11-4fd9-afda-852548d0a6fa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.206235] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Created directory with path [datastore2] vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.206440] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Fetch image to [datastore2] vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 744.206623] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 744.207688] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425b3157-e148-465b-98a3-e0b56ff9a3b8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.214213] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45fea66-5075-48d2-96e8-418117f71c89 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.223505] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a9c574-d1ba-4c4f-9261-18286622ef31 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.257163] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f56ae8-fa85-40cd-9745-92e686df4401 {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.265665] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4da9ecca-0223-4355-aea5-3bf050764bf3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.267266] env[61440]: DEBUG oslo_vmware.api [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Task: {'id': task-4281256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082153} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.267505] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 744.267686] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 744.267854] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 744.268031] env[61440]: INFO nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Took 0.60 seconds to destroy the instance on the hypervisor. 
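The CopyVirtualDisk_Task failure above follows the task-polling pattern that oslo.vmware's api.wait_for_task implements: the client kicks off a server-side vCenter task, polls its state (the "progress is 0%" lines), and translates a terminal error state into a VimFaultException — which is how the "A specified parameter was not correct: fileType" fault surfaced in _cache_sparse_image. The sketch below is illustrative only: TaskInfo, fetch_task_info, and the poll interval are stand-ins, not oslo.vmware's actual types; only the control flow mirrors the log.

```python
# Illustrative sketch of the poll-until-terminal loop behind the
# "Waiting for the task ... to complete" / "progress is 0%" messages.
# TaskInfo / fetch_task_info / VimFaultException are hypothetical
# stand-ins for the real oslo.vmware machinery.
import time
from dataclasses import dataclass, field


@dataclass
class TaskInfo:
    state: str                      # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error_msg: str = ''
    faults: list = field(default_factory=list)


class VimFaultException(Exception):
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list   # e.g. ['InvalidArgument']


def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    On 'error' the task's fault is raised, analogous to
    exceptions.translate_fault(task_info.error) in the traceback above.
    """
    while True:
        info = fetch_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise VimFaultException(info.faults, info.error_msg)
        print(f"Task: {task_ref} progress is {info.progress}%.")
        time.sleep(poll_interval)
```

In the real driver this loop runs inside an eventlet loopingcall (visible in the traceback's hub.switch() frames), so the wait yields to other greenthreads instead of blocking the compute worker.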
[ 744.270255] env[61440]: DEBUG nova.compute.claims [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 744.270429] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.270537] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.357395] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 744.406293] env[61440]: DEBUG oslo_vmware.rw_handles [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 744.469253] env[61440]: DEBUG oslo_vmware.rw_handles [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 744.469471] env[61440]: DEBUG oslo_vmware.rw_handles [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 744.645533] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.780409] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e108255a-d12f-4b05-aba6-266027ac7372 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.790054] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982fdaa3-c8d7-492f-8544-615667f78d08 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.833944] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02648646-ce7a-4838-adc9-0a266aa32892 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.842355] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3239b1c-988e-4b20-aa89-f1dfd56d545d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.856290] env[61440]: DEBUG nova.compute.provider_tree [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.865670] env[61440]: DEBUG nova.scheduler.client.report [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 744.881055] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.610s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.881631] env[61440]: ERROR nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not 
correct: fileType [ 744.881631] env[61440]: Faults: ['InvalidArgument'] [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Traceback (most recent call last): [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self.driver.spawn(context, instance, image_meta, [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self._fetch_image_if_missing(context, vi) [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] image_cache(vi, tmp_image_ds_loc) [ 744.881631] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] vm_util.copy_virtual_disk( [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] session._wait_for_task(vmdk_copy_task) [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] return self.wait_for_task(task_ref) [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] return evt.wait() [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] result = hub.switch() [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] return self.greenlet.switch() [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: 
b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 744.882070] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] self.f(*self.args, **self.kw) [ 744.882508] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 744.882508] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] raise exceptions.translate_fault(task_info.error) [ 744.882508] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 744.882508] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Faults: ['InvalidArgument'] [ 744.882508] env[61440]: ERROR nova.compute.manager [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] [ 744.882508] env[61440]: DEBUG nova.compute.utils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 744.883774] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Build of instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b was re-scheduled: A specified parameter was not correct: fileType [ 744.883774] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 744.884157] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 744.884326] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 744.884485] env[61440]: DEBUG nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 744.884657] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.252481] env[61440]: DEBUG nova.network.neutron [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.263706] env[61440]: INFO nova.compute.manager [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Took 0.38 seconds to deallocate network for instance. [ 745.370027] env[61440]: INFO nova.scheduler.client.report [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Deleted allocations for instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b [ 745.391906] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b3ad4f9-f10e-4e29-bcb2-ddcae0a0366b tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.271s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.394043] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 199.778s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.394043] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] During sync_power_state the instance has a pending task (spawning). Skip. 
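Every Acquiring/acquired/released triple above, with its "waited Ns" / "held Ns" timings (including the 203.271s build lock just released), is emitted by oslo.concurrency's lockutils wrapper around a named lock. The pattern is simply a decorator or context manager guarding the critical section; below is a small sketch of how a compute-manager-style method serializes on a shared name and on a per-instance UUID. lockutils.synchronized and lockutils.lock are the real oslo.concurrency entry points; do_terminate_instance is a hypothetical stand-in for the wrapped callable.

```python
# Sketch of the named-lock pattern behind the "Acquiring lock ... by ..." /
# "acquired ... waited Ns" / "released ... held Ns" lines above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # Only one thread at a time mutates resource-tracker state;
    # lockutils logs the waited/held durations seen in the log.
    pass


def do_terminate_instance(instance_uuid):
    # Hypothetical stand-in for the real teardown work.
    print(f"terminating {instance_uuid}")


def terminate_instance(instance_uuid):
    # Per-instance serialization: the lock name embeds the UUID, so
    # terminations of different instances do not block each other,
    # but a terminate waits out a still-running build of the same
    # instance -- exactly the 3.540s wait recorded above.
    with lockutils.lock(instance_uuid):
        do_terminate_instance(instance_uuid)
```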
[ 745.394043] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.394043] env[61440]: DEBUG oslo_concurrency.lockutils [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 3.540s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.394386] env[61440]: DEBUG oslo_concurrency.lockutils [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Acquiring lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.394464] env[61440]: DEBUG oslo_concurrency.lockutils [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.394706] env[61440]: DEBUG oslo_concurrency.lockutils [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.396743] env[61440]: INFO nova.compute.manager [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Terminating instance [ 745.398500] env[61440]: DEBUG nova.compute.manager [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 745.398684] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.398931] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a16ae9ef-7c9e-43c0-9ec0-2ac859250ff3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.408771] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3530519a-5888-4742-acfd-2bda5b325293 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.420095] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 745.439477] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b could not be found. [ 745.439690] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 745.439874] env[61440]: INFO nova.compute.manager [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 745.440132] env[61440]: DEBUG oslo.service.loopingcall [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.440360] env[61440]: DEBUG nova.compute.manager [-] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 745.440468] env[61440]: DEBUG nova.network.neutron [-] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.466953] env[61440]: DEBUG nova.network.neutron [-] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.476268] env[61440]: INFO nova.compute.manager [-] [instance: b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b] Took 0.04 seconds to deallocate network for instance. [ 745.494483] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.494483] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.495898] env[61440]: INFO nova.compute.claims [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.621612] env[61440]: DEBUG oslo_concurrency.lockutils [None req-11fbec2a-5b65-46bb-a4e0-7e99cc945638 tempest-ServerDiagnosticsTest-695258670 tempest-ServerDiagnosticsTest-695258670-project-member] Lock "b6dbc66b-ba8c-4f5f-93e3-fef9a50b104b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.228s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.961319] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115d7911-7b7a-4b47-940a-fee98cb296c4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.971031] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d858901d-c3b6-493a-8071-e2c11ce25b12 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.000291] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322a7757-89b8-4603-be19-7942b3a16151 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.007448] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-557742f0-4f5b-4298-af4f-ee7a7b09a8b1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.023182] env[61440]: DEBUG nova.compute.provider_tree [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.035500] env[61440]: DEBUG nova.scheduler.client.report [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 746.056649] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.562s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.057174] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 746.108478] env[61440]: DEBUG nova.compute.utils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.109925] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Allocating IP information in the background. 
{{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 746.110181] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.122129] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 746.192422] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 746.227100] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.227100] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.227100] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.227263] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.227263] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Image pref 0:0:0 {{(pid=61440) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.227263] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.227263] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.227263] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.228533] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.228890] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.229269] env[61440]: DEBUG nova.virt.hardware [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.231040] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c32ea5-87b3-4bfb-9dcd-9e8874405d30 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.239740] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937cacfa-e7aa-48c9-923d-5c92147fe36f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.389042] env[61440]: DEBUG nova.policy [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da0ef0d5f0bf42f897c3316a1ef40947', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c35dd3a505754c358ba43b1ada39abdc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 746.863475] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Successfully created port: ef844e49-2a16-4827-b81c-9edae8c102bb {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.931351] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Successfully updated port: ef844e49-2a16-4827-b81c-9edae8c102bb {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.953920] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "refresh_cache-f152a563-2988-4fac-9974-af25e17f14d1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.954100] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquired lock "refresh_cache-f152a563-2988-4fac-9974-af25e17f14d1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.954247] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.023485] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.162523] env[61440]: DEBUG nova.compute.manager [req-08b1eb1d-97c5-40eb-b258-0fdac9093772 req-27ef8bad-c3ab-4207-998f-6fe27462dc6e service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Received event network-vif-plugged-ef844e49-2a16-4827-b81c-9edae8c102bb {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 748.162857] env[61440]: DEBUG oslo_concurrency.lockutils [req-08b1eb1d-97c5-40eb-b258-0fdac9093772 req-27ef8bad-c3ab-4207-998f-6fe27462dc6e service nova] Acquiring lock "f152a563-2988-4fac-9974-af25e17f14d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.163518] env[61440]: DEBUG oslo_concurrency.lockutils [req-08b1eb1d-97c5-40eb-b258-0fdac9093772 req-27ef8bad-c3ab-4207-998f-6fe27462dc6e service nova] Lock "f152a563-2988-4fac-9974-af25e17f14d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.164122] env[61440]: DEBUG oslo_concurrency.lockutils [req-08b1eb1d-97c5-40eb-b258-0fdac9093772 req-27ef8bad-c3ab-4207-998f-6fe27462dc6e service nova] Lock "f152a563-2988-4fac-9974-af25e17f14d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.164488] env[61440]: DEBUG nova.compute.manager [req-08b1eb1d-97c5-40eb-b258-0fdac9093772 req-27ef8bad-c3ab-4207-998f-6fe27462dc6e service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] No waiting events found dispatching network-vif-plugged-ef844e49-2a16-4827-b81c-9edae8c102bb {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 748.165226] env[61440]: WARNING nova.compute.manager [req-08b1eb1d-97c5-40eb-b258-0fdac9093772 req-27ef8bad-c3ab-4207-998f-6fe27462dc6e service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Received unexpected event network-vif-plugged-ef844e49-2a16-4827-b81c-9edae8c102bb for instance with vm_state building and task_state spawning. 
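Annotation: the "Acquiring lock" / "acquired ... waited N s" / "released ... held N s" records above, for both the "refresh_cache-<uuid>" lock and the per-instance "<uuid>-events" lock, come from oslo.concurrency. A minimal sketch of the two idioms, with illustrative lock names rather than Nova's actual call sites:

    # Minimal sketch of the oslo.concurrency idioms behind the lock records above.
    # Lock names are illustrative; Nova derives them from the instance UUID.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('f152a563-2988-4fac-9974-af25e17f14d1-events')
    def _pop_event():
        # The decorator's inner() wrapper emits the "Acquiring lock ... by ...",
        # "acquired ... waited N s" and "released ... held N s" DEBUG records
        # (logged from lockutils.py:402/407/421 in the lines above).
        pass

    def refresh_cache(instance_uuid):
        # The context-manager form guards the network info cache rebuild, so
        # only one worker thread refreshes a given instance's cache at a time
        # (logged from lockutils.py:310/313/331 above).
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild instance_info_cache here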
[ 748.238301] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Updating instance_info_cache with network_info: [{"id": "ef844e49-2a16-4827-b81c-9edae8c102bb", "address": "fa:16:3e:36:99:82", "network": {"id": "b5c26db8-17be-44d0-8360-6ab9b680ad64", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1401579886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c35dd3a505754c358ba43b1ada39abdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef844e49-2a", "ovs_interfaceid": "ef844e49-2a16-4827-b81c-9edae8c102bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.256459] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Releasing lock "refresh_cache-f152a563-2988-4fac-9974-af25e17f14d1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.256847] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance network_info: |[{"id": "ef844e49-2a16-4827-b81c-9edae8c102bb", "address": "fa:16:3e:36:99:82", "network": {"id": "b5c26db8-17be-44d0-8360-6ab9b680ad64", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1401579886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c35dd3a505754c358ba43b1ada39abdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef844e49-2a", "ovs_interfaceid": "ef844e49-2a16-4827-b81c-9edae8c102bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 748.257749] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:99:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef844e49-2a16-4827-b81c-9edae8c102bb', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.267853] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Creating folder: Project (c35dd3a505754c358ba43b1ada39abdc). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.272178] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d5392c5-fae9-4f25-ba2f-8347c7cdcd0e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.280879] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Created folder: Project (c35dd3a505754c358ba43b1ada39abdc) in parent group-v843372. [ 748.281107] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Creating folder: Instances. Parent ref: group-v843414. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.281360] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c95c1b6-a7fc-4500-8d94-09d973bf2a38 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.297597] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Created folder: Instances in parent group-v843414. [ 748.297887] env[61440]: DEBUG oslo.service.loopingcall [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.298046] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 748.298261] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c99e8b8-ff68-4ca8-bfe1-cca4ddb07391 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.320032] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.320032] env[61440]: value = "task-4281259" [ 748.320032] env[61440]: _type = "Task" [ 748.320032] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.331823] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281259, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.831666] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281259, 'name': CreateVM_Task, 'duration_secs': 0.336461} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.831666] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 748.832288] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.832489] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.832815] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 748.833015] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3535e31-e353-4dd7-908b-9bef02c8cf7f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.837979] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Waiting for the task: (returnval){ [ 748.837979] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d331f0-90c7-4b1f-d7d3-4870095ce1d6" [ 748.837979] env[61440]: _type = "Task" [ 748.837979] 
env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.847594] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d331f0-90c7-4b1f-d7d3-4870095ce1d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.349601] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.349873] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.350170] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.004080] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.604310] env[61440]: DEBUG nova.compute.manager [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Received event network-changed-ef844e49-2a16-4827-b81c-9edae8c102bb {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 750.604605] env[61440]: DEBUG nova.compute.manager [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Refreshing instance network info cache due to event network-changed-ef844e49-2a16-4827-b81c-9edae8c102bb. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 750.604677] env[61440]: DEBUG oslo_concurrency.lockutils [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] Acquiring lock "refresh_cache-f152a563-2988-4fac-9974-af25e17f14d1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.604789] env[61440]: DEBUG oslo_concurrency.lockutils [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] Acquired lock "refresh_cache-f152a563-2988-4fac-9974-af25e17f14d1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.605874] env[61440]: DEBUG nova.network.neutron [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Refreshing network info cache for port ef844e49-2a16-4827-b81c-9edae8c102bb {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 751.156071] env[61440]: DEBUG nova.network.neutron [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Updated VIF entry in instance network info cache for port ef844e49-2a16-4827-b81c-9edae8c102bb. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 751.156071] env[61440]: DEBUG nova.network.neutron [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Updating instance_info_cache with network_info: [{"id": "ef844e49-2a16-4827-b81c-9edae8c102bb", "address": "fa:16:3e:36:99:82", "network": {"id": "b5c26db8-17be-44d0-8360-6ab9b680ad64", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1401579886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c35dd3a505754c358ba43b1ada39abdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef844e49-2a", "ovs_interfaceid": "ef844e49-2a16-4827-b81c-9edae8c102bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.165352] env[61440]: DEBUG oslo_concurrency.lockutils [req-58bdd490-b7c2-41ef-9156-0c970d150972 req-56fc1fb2-6311-462e-aefe-badfdd3d8eb1 service nova] Releasing lock "refresh_cache-f152a563-2988-4fac-9974-af25e17f14d1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.900036] env[61440]: DEBUG oslo_concurrency.lockutils [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] 
Acquiring lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.152318] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.152318] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.571271] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "2d853a6e-4c2f-401e-9088-54e82bec1150" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.475899] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "1438771e-fd84-4dac-81b1-c2df19972ebe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.931980] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "3395aaef-0db6-4fab-b8a5-79b781129690" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.839110] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "f152a563-2988-4fac-9974-af25e17f14d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.773023] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c09f1745-8c4f-429f-92f8-3dbfa6e50551 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] Acquiring lock "09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.773667] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-c09f1745-8c4f-429f-92f8-3dbfa6e50551 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] Lock "09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.396088] env[61440]: DEBUG oslo_concurrency.lockutils [None req-48d6d9f5-4ce6-4121-97b9-3c879a65e191 tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] Acquiring lock "f455e5bd-301f-4b08-8d41-41c969ace4f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.396493] env[61440]: DEBUG oslo_concurrency.lockutils [None req-48d6d9f5-4ce6-4121-97b9-3c879a65e191 tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] Lock "f455e5bd-301f-4b08-8d41-41c969ace4f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.545927] env[61440]: WARNING oslo_vmware.rw_handles [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 789.545927] env[61440]: ERROR oslo_vmware.rw_handles [ 789.546756] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 789.548312] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 
tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 789.548591] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Copying Virtual Disk [datastore2] vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/9289f891-ecc1-4fda-8048-a7657104635e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 789.548948] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d78dabf8-366c-49c2-a44c-a12848602b17 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.557937] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Waiting for the task: (returnval){ [ 789.557937] env[61440]: value = "task-4281260" [ 789.557937] env[61440]: _type = "Task" [ 789.557937] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.567322] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Task: {'id': task-4281260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.071013] env[61440]: DEBUG oslo_vmware.exceptions [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 790.071495] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.072267] env[61440]: ERROR nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 790.072267] env[61440]: Faults: ['InvalidArgument'] [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Traceback (most recent call last): [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] yield resources [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self.driver.spawn(context, instance, image_meta, [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self._fetch_image_if_missing(context, vi) [ 790.072267] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] image_cache(vi, tmp_image_ds_loc) [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] vm_util.copy_virtual_disk( [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] session._wait_for_task(vmdk_copy_task) [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] return self.wait_for_task(task_ref) [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] return evt.wait() [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] result = hub.switch() [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 790.072713] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] return self.greenlet.switch() [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self.f(*self.args, **self.kw) [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] raise exceptions.translate_fault(task_info.error) [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Faults: ['InvalidArgument'] [ 790.073093] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] [ 790.075020] env[61440]: INFO nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Terminating instance [ 790.075663] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.075988] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.076766] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 
94b6a93d-de4d-4600-94af-81dce16b22f7] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 790.078490] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 790.078490] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cdd57bd-0702-45b3-a8f4-7a73a34876f5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.081091] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bd25de-4bc4-483b-afbd-2ac39388b7b1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.090291] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 790.090875] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dda11750-daad-4f2b-9af3-7f7a5ce82049 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.092628] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.092928] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 790.093803] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3de1840d-ecd6-4a7c-b628-e17c296c7dc9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.099931] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Waiting for the task: (returnval){ [ 790.099931] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a5082d-b2b0-8980-6462-d96ef367b8db" [ 790.099931] env[61440]: _type = "Task" [ 790.099931] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.109947] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a5082d-b2b0-8980-6462-d96ef367b8db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.161706] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 790.165312] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 790.165483] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Deleting the datastore file [datastore2] 94b6a93d-de4d-4600-94af-81dce16b22f7 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 790.165768] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-205aafc1-6947-4f2d-a64e-fba79262155c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.172322] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Waiting for the task: (returnval){ [ 790.172322] env[61440]: value = "task-4281262" [ 790.172322] env[61440]: _type = "Task" [ 790.172322] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.181137] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Task: {'id': task-4281262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.611213] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 790.611501] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Creating directory with path [datastore2] vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.611746] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44dc2101-426b-4457-a72b-a922a5275ca0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.624211] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Created directory with path [datastore2] vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.624379] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Fetch image to [datastore2] vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 790.624556] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 790.625336] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff4f799-c2e3-487f-ae52-fe393fd17236 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.632367] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7606bdc9-fe68-4de7-bd93-29d5f9b1ff1d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.642912] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05da678a-121e-4162-b130-a0f1aaae685e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.680469] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b6378adb-a99f-426f-91c8-3e3b2ef32496 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.687767] env[61440]: DEBUG oslo_vmware.api [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Task: {'id': task-4281262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08649} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.690030] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 790.690213] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 790.690470] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 790.690630] env[61440]: INFO nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Took 0.61 seconds to destroy the instance on the hypervisor. 
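Annotation: the "Invoking ..._Task with opID=..." / "Waiting for the task" / "progress is 0%" records above are oslo.vmware's request-and-poll cycle (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task all follow it). A minimal sketch of that cycle, assuming a hypothetical vCenter endpoint and using PowerOffVM_Task as a stand-in task:

    # Minimal sketch of the oslo.vmware call-then-poll cycle seen above.
    # Endpoint, credentials and vm_ref are placeholders, not values from this log.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # invoke_api() issues the SOAP request (the "Invoking ... with
        # opID=oslo.vmware-..." records) and returns a task moref.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() drives the _poll_task loop that logs "progress is 0%"
        # until the task finishes; on failure it raises a translated exception,
        # as with the CopyVirtualDisk_Task (task-4281260) earlier in this log.
        return session.wait_for_task(task)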
[ 790.692436] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e5bb6f66-c88c-4beb-9561-a38c11d814d6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.694718] env[61440]: DEBUG nova.compute.claims [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 790.694906] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.695127] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.787109] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 790.817379] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b57d2d32-329b-4cd2-b56b-3f4ca79e7e7f tempest-ServerDiagnosticsV248Test-1386053675 tempest-ServerDiagnosticsV248Test-1386053675-project-member] Acquiring lock "4358437a-d336-44d2-b069-60b4992adc77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.817599] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b57d2d32-329b-4cd2-b56b-3f4ca79e7e7f tempest-ServerDiagnosticsV248Test-1386053675 tempest-ServerDiagnosticsV248Test-1386053675-project-member] Lock "4358437a-d336-44d2-b069-60b4992adc77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.840099] env[61440]: DEBUG oslo_vmware.rw_handles [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 790.907677] env[61440]: DEBUG oslo_vmware.rw_handles [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 790.907876] env[61440]: DEBUG oslo_vmware.rw_handles [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 791.246693] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551d914f-93bd-429a-8e1e-df35f9d81530 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.255087] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97374cee-6a10-4198-adf2-201742b5f908 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.288157] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.288157] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 791.291126] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18b6a6b-6246-4c52-a563-7aceaebbe963 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.300323] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d78d217-02f3-4a6a-b977-c8b1c89deb78 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.305835] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] There are 0 instances to clean {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 791.306432] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.306623] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances with incomplete migration {{(pid=61440) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 791.318204] env[61440]: DEBUG nova.compute.provider_tree [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 
tempest-MigrationsAdminTest-1548070187-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.323023] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.325806] env[61440]: DEBUG nova.scheduler.client.report [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 791.349248] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.654s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.349801] env[61440]: ERROR nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 791.349801] env[61440]: Faults: ['InvalidArgument'] [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Traceback (most recent call last): [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self.driver.spawn(context, instance, image_meta, [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self._fetch_image_if_missing(context, vi) [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 
94b6a93d-de4d-4600-94af-81dce16b22f7] image_cache(vi, tmp_image_ds_loc) [ 791.349801] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] vm_util.copy_virtual_disk( [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] session._wait_for_task(vmdk_copy_task) [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] return self.wait_for_task(task_ref) [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] return evt.wait() [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] result = hub.switch() [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] return self.greenlet.switch() [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 791.350283] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] self.f(*self.args, **self.kw) [ 791.350660] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 791.350660] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] raise exceptions.translate_fault(task_info.error) [ 791.350660] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 791.350660] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Faults: ['InvalidArgument'] [ 791.350660] env[61440]: ERROR nova.compute.manager [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] [ 791.350660] env[61440]: DEBUG nova.compute.utils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 791.352414] env[61440]: DEBUG nova.compute.manager [None 
req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Build of instance 94b6a93d-de4d-4600-94af-81dce16b22f7 was re-scheduled: A specified parameter was not correct: fileType [ 791.352414] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 791.352785] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 791.352955] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 791.353127] env[61440]: DEBUG nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 791.353288] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 791.440977] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7df9bef3-d9f8-4b19-bd2d-5a77e5f15b44 tempest-InstanceActionsTestJSON-57248931 tempest-InstanceActionsTestJSON-57248931-project-member] Acquiring lock "20acf443-e7c3-43c8-8203-23a257532c13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.441261] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7df9bef3-d9f8-4b19-bd2d-5a77e5f15b44 tempest-InstanceActionsTestJSON-57248931 tempest-InstanceActionsTestJSON-57248931-project-member] Lock "20acf443-e7c3-43c8-8203-23a257532c13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.958128] env[61440]: DEBUG nova.network.neutron [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.971516] env[61440]: INFO nova.compute.manager [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Took 0.62 seconds to deallocate network for instance. 
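The traceback above is the heart of this failure: _cache_sparse_image submits a CopyVirtualDisk_Task to vCenter and blocks in oslo_vmware.api's task polling; when the task ends in an error state, the vSphere fault is translated into the VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) that propagates up through spawn() and triggers the re-schedule logged at 791.352414. A minimal sketch of that poll-and-translate pattern, for orientation only -- the exception class and get_task_info helper below are hypothetical stand-ins, not the real oslo.vmware API:

    # Sketch of the poll loop visible in the traceback
    # (oslo_vmware/api.py: wait_for_task -> _poll_task). Illustrative
    # names only; this is not the oslo.vmware implementation.
    import time

    class VimFaultError(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a vSphere task until it reaches a terminal state."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # Translate the fault carried in task_info.error into a
                # Python exception, which is what surfaces above as
                # "A specified parameter was not correct: fileType".
                raise VimFaultError(info.error.localized_message,
                                    info.error.fault_names)
            time.sleep(interval)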
[ 792.125565] env[61440]: INFO nova.scheduler.client.report [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Deleted allocations for instance 94b6a93d-de4d-4600-94af-81dce16b22f7 [ 792.150189] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9ab4211f-646f-4d00-91c6-6d4acea75439 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 248.725s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.151401] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 246.535s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.151591] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] During sync_power_state the instance has a pending task (spawning). Skip. [ 792.151765] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.152385] env[61440]: DEBUG oslo_concurrency.lockutils [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 49.985s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.152599] env[61440]: DEBUG oslo_concurrency.lockutils [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "94b6a93d-de4d-4600-94af-81dce16b22f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.152797] env[61440]: DEBUG oslo_concurrency.lockutils [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.152959] env[61440]: DEBUG oslo_concurrency.lockutils [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.154736] env[61440]: INFO nova.compute.manager [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Terminating instance [ 792.156465] env[61440]: DEBUG nova.compute.manager [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 792.156660] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 792.156933] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e1450e3-ffde-4f67-b9cc-ee86ca4d3fec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.167863] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd8e257-dbdb-4ad8-9eac-b5872c71a3ca {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.180813] env[61440]: DEBUG nova.compute.manager [None req-8aa1a5e9-7e17-4d6e-b779-d1bc16585272 tempest-VolumesAssistedSnapshotsTest-2106796437 tempest-VolumesAssistedSnapshotsTest-2106796437-project-member] [instance: 766a9405-8a7d-4876-8569-964d2e73fedb] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.202280] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 94b6a93d-de4d-4600-94af-81dce16b22f7 could not be found. [ 792.204377] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 792.204377] env[61440]: INFO nova.compute.manager [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 792.204377] env[61440]: DEBUG oslo.service.loopingcall [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.204377] env[61440]: DEBUG nova.compute.manager [-] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 792.204377] env[61440]: DEBUG nova.network.neutron [-] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 792.213047] env[61440]: DEBUG nova.compute.manager [None req-8aa1a5e9-7e17-4d6e-b779-d1bc16585272 tempest-VolumesAssistedSnapshotsTest-2106796437 tempest-VolumesAssistedSnapshotsTest-2106796437-project-member] [instance: 766a9405-8a7d-4876-8569-964d2e73fedb] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.246311] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8aa1a5e9-7e17-4d6e-b779-d1bc16585272 tempest-VolumesAssistedSnapshotsTest-2106796437 tempest-VolumesAssistedSnapshotsTest-2106796437-project-member] Lock "766a9405-8a7d-4876-8569-964d2e73fedb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.917s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.261021] env[61440]: DEBUG nova.compute.manager [None req-a6b92dae-08b8-49f8-95c8-2f6d764b7846 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] [instance: 9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.275440] env[61440]: DEBUG nova.network.neutron [-] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.313236] env[61440]: INFO nova.compute.manager [-] [instance: 94b6a93d-de4d-4600-94af-81dce16b22f7] Took 0.11 seconds to deallocate network for instance. [ 792.315408] env[61440]: DEBUG nova.compute.manager [None req-a6b92dae-08b8-49f8-95c8-2f6d764b7846 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] [instance: 9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.322284] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 792.322434] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 792.345613] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a6b92dae-08b8-49f8-95c8-2f6d764b7846 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] Lock "9d46e2f3-2bb8-40d2-9e45-43e8c06c8d1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.560s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.362834] env[61440]: DEBUG nova.compute.manager [None req-e2c138d8-73be-4ead-ac57-7c88da84f030 tempest-ServersWithSpecificFlavorTestJSON-1644472028 tempest-ServersWithSpecificFlavorTestJSON-1644472028-project-member] [instance: 2d57a929-eb61-471d-b0a1-d1e366201ccc] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.399592] env[61440]: DEBUG nova.compute.manager [None req-e2c138d8-73be-4ead-ac57-7c88da84f030 tempest-ServersWithSpecificFlavorTestJSON-1644472028 tempest-ServersWithSpecificFlavorTestJSON-1644472028-project-member] [instance: 2d57a929-eb61-471d-b0a1-d1e366201ccc] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.430274] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e2c138d8-73be-4ead-ac57-7c88da84f030 tempest-ServersWithSpecificFlavorTestJSON-1644472028 tempest-ServersWithSpecificFlavorTestJSON-1644472028-project-member] Lock "2d57a929-eb61-471d-b0a1-d1e366201ccc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.503s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.461836] env[61440]: DEBUG nova.compute.manager [None req-63ff02c4-c79d-48ac-afb4-72c6f969fe3c tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] [instance: 56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.468036] env[61440]: DEBUG oslo_concurrency.lockutils [None req-aa8f161f-acac-4393-aea4-37fe3c91f4d5 tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "94b6a93d-de4d-4600-94af-81dce16b22f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.315s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.490059] env[61440]: DEBUG nova.compute.manager [None req-63ff02c4-c79d-48ac-afb4-72c6f969fe3c tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] [instance: 56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.510513] env[61440]: DEBUG oslo_concurrency.lockutils [None req-63ff02c4-c79d-48ac-afb4-72c6f969fe3c tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] Lock "56f9a2d5-b28d-4a2a-bc52-2b8a00ee1c0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.685s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.520188] env[61440]: DEBUG nova.compute.manager [None req-f39d503d-726c-4392-a5cb-9bda41f75cd2 tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] [instance: 0a80de0b-a914-443b-be18-a13b0eda231e] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.543878] env[61440]: DEBUG nova.compute.manager [None req-f39d503d-726c-4392-a5cb-9bda41f75cd2 tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] [instance: 0a80de0b-a914-443b-be18-a13b0eda231e] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.565178] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f39d503d-726c-4392-a5cb-9bda41f75cd2 tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] Lock "0a80de0b-a914-443b-be18-a13b0eda231e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.004s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.574066] env[61440]: DEBUG nova.compute.manager [None req-e30b274c-6cf2-4794-8dc7-0fd5d63ece1c tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] [instance: e6a7a2d1-a50d-478c-9c27-fe58504fa14b] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.610558] env[61440]: DEBUG nova.compute.manager [None req-e30b274c-6cf2-4794-8dc7-0fd5d63ece1c tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] [instance: e6a7a2d1-a50d-478c-9c27-fe58504fa14b] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.636844] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e30b274c-6cf2-4794-8dc7-0fd5d63ece1c tempest-ListImageFiltersTestJSON-416764179 tempest-ListImageFiltersTestJSON-416764179-project-member] Lock "e6a7a2d1-a50d-478c-9c27-fe58504fa14b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.874s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.647558] env[61440]: DEBUG nova.compute.manager [None req-7feb6e47-0c97-4720-a2dc-2222b433c04f tempest-ServersAaction247Test-122133342 tempest-ServersAaction247Test-122133342-project-member] [instance: 53a53e12-741a-4104-91d1-8d41f2b490ab] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.676283] env[61440]: DEBUG nova.compute.manager [None req-7feb6e47-0c97-4720-a2dc-2222b433c04f tempest-ServersAaction247Test-122133342 tempest-ServersAaction247Test-122133342-project-member] [instance: 53a53e12-741a-4104-91d1-8d41f2b490ab] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.700242] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7feb6e47-0c97-4720-a2dc-2222b433c04f tempest-ServersAaction247Test-122133342 tempest-ServersAaction247Test-122133342-project-member] Lock "53a53e12-741a-4104-91d1-8d41f2b490ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.775s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.711744] env[61440]: DEBUG nova.compute.manager [None req-66ead4ad-257f-4867-ae78-e7f6589e9221 tempest-FloatingIPsAssociationTestJSON-1509650545 tempest-FloatingIPsAssociationTestJSON-1509650545-project-member] [instance: 20a0ddf2-83bc-4e56-8208-12bb200c26e2] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 792.741721] env[61440]: DEBUG nova.compute.manager [None req-66ead4ad-257f-4867-ae78-e7f6589e9221 tempest-FloatingIPsAssociationTestJSON-1509650545 tempest-FloatingIPsAssociationTestJSON-1509650545-project-member] [instance: 20a0ddf2-83bc-4e56-8208-12bb200c26e2] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 792.774089] env[61440]: DEBUG oslo_concurrency.lockutils [None req-66ead4ad-257f-4867-ae78-e7f6589e9221 tempest-FloatingIPsAssociationTestJSON-1509650545 tempest-FloatingIPsAssociationTestJSON-1509650545-project-member] Lock "20a0ddf2-83bc-4e56-8208-12bb200c26e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.620s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.795594] env[61440]: DEBUG nova.compute.manager [None req-d1cdad2b-b45c-41ed-9207-a40e5b8ddb4a tempest-ServersTestBootFromVolume-1926183255 tempest-ServersTestBootFromVolume-1926183255-project-member] [instance: e4395e44-91f2-4f9b-a902-12859618f9cd] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.279697] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.859069] env[61440]: DEBUG nova.compute.manager [None req-d1cdad2b-b45c-41ed-9207-a40e5b8ddb4a tempest-ServersTestBootFromVolume-1926183255 tempest-ServersTestBootFromVolume-1926183255-project-member] [instance: e4395e44-91f2-4f9b-a902-12859618f9cd] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.908134] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d1cdad2b-b45c-41ed-9207-a40e5b8ddb4a tempest-ServersTestBootFromVolume-1926183255 tempest-ServersTestBootFromVolume-1926183255-project-member] Lock "e4395e44-91f2-4f9b-a902-12859618f9cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.203s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.920802] env[61440]: DEBUG nova.compute.manager [None req-2f2b3988-ee75-48f1-933a-81b19bb484e6 tempest-ImagesNegativeTestJSON-1075041407 tempest-ImagesNegativeTestJSON-1075041407-project-member] [instance: 276e62bf-fd35-47ed-b422-b45fb4a89ed2] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.958057] env[61440]: DEBUG nova.compute.manager [None req-2f2b3988-ee75-48f1-933a-81b19bb484e6 tempest-ImagesNegativeTestJSON-1075041407 tempest-ImagesNegativeTestJSON-1075041407-project-member] [instance: 276e62bf-fd35-47ed-b422-b45fb4a89ed2] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.987642] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2f2b3988-ee75-48f1-933a-81b19bb484e6 tempest-ImagesNegativeTestJSON-1075041407 tempest-ImagesNegativeTestJSON-1075041407-project-member] Lock "276e62bf-fd35-47ed-b422-b45fb4a89ed2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.272s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.002023] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 794.082466] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.082735] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.084428] env[61440]: INFO nova.compute.claims [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.276405] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.276405] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 794.276405] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 794.302081] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302081] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302081] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302081] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302081] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302550] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302717] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.302838] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.303705] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.303864] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 794.303995] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 794.631247] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dda4451-c2e1-4b69-8cdf-09669564acf6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.635666] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb57e864-1acb-48e1-bf2e-51526643263c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.668344] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad75bd49-2398-4c0b-9d8e-5a4639559e8f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.673247] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8ccdcc32-f532-419d-b1ec-f10627704f0d tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Acquiring lock "e3447d16-79a6-4b5a-bdc0-f148276b48f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.673475] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8ccdcc32-f532-419d-b1ec-f10627704f0d tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "e3447d16-79a6-4b5a-bdc0-f148276b48f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.679147] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500c794f-91a1-4e50-8449-4cda38484f8f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.694095] env[61440]: DEBUG nova.compute.provider_tree [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.704133] env[61440]: DEBUG nova.scheduler.client.report [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.720943] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: held 0.638s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.721455] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 794.767958] env[61440]: DEBUG nova.compute.utils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 794.773215] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 794.773215] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 794.779143] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 794.873105] env[61440]: DEBUG nova.policy [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5bdfeec45574318bd594d4c72f236ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dab322726cca4a07978c53c412fa7ee9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 794.896965] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 794.930240] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.930787] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.930787] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.933034] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.933034] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.933034] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.933034] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.933034] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 794.933396] env[61440]: DEBUG nova.virt.hardware [None 
req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.933396] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.933396] env[61440]: DEBUG nova.virt.hardware [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.934196] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffd8ae3-71bc-4770-afbd-3e6439d3571f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.944345] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69960d7a-fd6c-4af7-9427-e9e9c8b71ed9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.274896] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.275101] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.275330] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.275513] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.588166] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Successfully created port: d1fa390a-e19b-4320-b080-e425f5a1b847 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.271123] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.443169] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 
tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Successfully updated port: d1fa390a-e19b-4320-b080-e425f5a1b847 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.463058] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.463058] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquired lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.463058] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 797.571269] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.642430] env[61440]: DEBUG nova.compute.manager [req-dfaef0a5-e07f-4222-942f-9448cb2f8ccc req-403b5d39-ec4e-4c79-89de-7f7b02ef9a7a service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Received event network-vif-plugged-d1fa390a-e19b-4320-b080-e425f5a1b847 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 797.642669] env[61440]: DEBUG oslo_concurrency.lockutils [req-dfaef0a5-e07f-4222-942f-9448cb2f8ccc req-403b5d39-ec4e-4c79-89de-7f7b02ef9a7a service nova] Acquiring lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.642927] env[61440]: DEBUG oslo_concurrency.lockutils [req-dfaef0a5-e07f-4222-942f-9448cb2f8ccc req-403b5d39-ec4e-4c79-89de-7f7b02ef9a7a service nova] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.643130] env[61440]: DEBUG oslo_concurrency.lockutils [req-dfaef0a5-e07f-4222-942f-9448cb2f8ccc req-403b5d39-ec4e-4c79-89de-7f7b02ef9a7a service nova] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.643309] env[61440]: DEBUG nova.compute.manager [req-dfaef0a5-e07f-4222-942f-9448cb2f8ccc req-403b5d39-ec4e-4c79-89de-7f7b02ef9a7a service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] No waiting events found 
dispatching network-vif-plugged-d1fa390a-e19b-4320-b080-e425f5a1b847 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 797.643475] env[61440]: WARNING nova.compute.manager [req-dfaef0a5-e07f-4222-942f-9448cb2f8ccc req-403b5d39-ec4e-4c79-89de-7f7b02ef9a7a service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Received unexpected event network-vif-plugged-d1fa390a-e19b-4320-b080-e425f5a1b847 for instance with vm_state building and task_state spawning. [ 798.112871] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.157066] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Updating instance_info_cache with network_info: [{"id": "d1fa390a-e19b-4320-b080-e425f5a1b847", "address": "fa:16:3e:00:b4:cf", "network": {"id": "4e1ce17c-5cf1-4b72-9f25-c702529fa569", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1185919493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab322726cca4a07978c53c412fa7ee9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1fa390a-e1", "ovs_interfaceid": "d1fa390a-e19b-4320-b080-e425f5a1b847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.170656] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Releasing lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.171464] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance network_info: |[{"id": "d1fa390a-e19b-4320-b080-e425f5a1b847", "address": "fa:16:3e:00:b4:cf", "network": {"id": "4e1ce17c-5cf1-4b72-9f25-c702529fa569", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1185919493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab322726cca4a07978c53c412fa7ee9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1fa390a-e1", "ovs_interfaceid": "d1fa390a-e19b-4320-b080-e425f5a1b847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 798.172356] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:b4:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1fa390a-e19b-4320-b080-e425f5a1b847', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.182893] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Creating folder: Project (dab322726cca4a07978c53c412fa7ee9). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.184945] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-118e9eb3-f680-4407-a697-253df7e3c83f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.197891] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Created folder: Project (dab322726cca4a07978c53c412fa7ee9) in parent group-v843372. [ 798.198266] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Creating folder: Instances. Parent ref: group-v843417. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.198645] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54762d30-8264-4570-9162-007bd319094b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.212713] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Created folder: Instances in parent group-v843417. 
[ 798.213233] env[61440]: DEBUG oslo.service.loopingcall [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.213233] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 798.213367] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-649f8d7b-985b-4391-98b7-57d701ec14e5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.237373] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.237373] env[61440]: value = "task-4281265" [ 798.237373] env[61440]: _type = "Task" [ 798.237373] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.252092] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281265, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.273936] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.290169] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.290376] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.290545] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.290706] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 798.291903] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865cc347-63ff-469f-a572-3b42f70943c0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.301649] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc96cb0c-60b0-4c59-837b-62d97e2394fe {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.316864] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d97187-2183-45be-a2c4-9679826c24fd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.323895] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1dc345-a427-44d2-9a33-de3677b23679 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.357506] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180647MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 798.357710] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.357920] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.544101] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f59b0b04-643c-497c-90a0-a7f885c1eb3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.544971] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.544971] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.544971] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.544971] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.545256] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.545256] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.545256] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.545256] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.545392] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 798.564161] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3ca84343-76bb-46f4-89d8-3cc45ac3dc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.578057] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c737971c-735e-4317-b0e6-eb73bfc8456a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.590785] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 61c5fe8a-8fab-4fca-b03e-b583b2566162 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.602887] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a5eb3371-1899-44cb-ba30-44aa4d54b2ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.615454] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 697179bb-2391-4434-8144-ee917aa84441 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.630690] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.653637] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a91235be-0b5f-4b2a-8da3-569ec393305a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.666134] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f0ad826d-58d7-4a52-8767-4609170d964d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.681030] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d8cc1718-c721-478b-807a-d6ae1eb09c7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.692328] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.707486] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.721453] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.736277] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.748923] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281265, 'name': CreateVM_Task, 'duration_secs': 0.333583} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.749647] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 798.750902] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f455e5bd-301f-4b08-8d41-41c969ace4f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.752031] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.752135] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.755366] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 798.755366] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f93908c-2bf9-406d-b937-4d0c78a5f322 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.758754] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Waiting for the task: (returnval){ [ 798.758754] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52161b74-f760-0923-fe9e-802fcdaa27a8" [ 798.758754] env[61440]: _type = "Task" [ 798.758754] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.767299] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52161b74-f760-0923-fe9e-802fcdaa27a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.768615] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4358437a-d336-44d2-b069-60b4992adc77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.782803] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20acf443-e7c3-43c8-8203-23a257532c13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.798307] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e3447d16-79a6-4b5a-bdc0-f148276b48f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.798542] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 798.798697] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 798.818728] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing inventories for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 798.844152] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating ProviderTree inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 798.844350] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.861923] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing aggregate associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, aggregates: None {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 798.892345] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing trait associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, traits: 
COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 799.272452] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.272721] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.272931] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.296684] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a01ce27-4d8a-4fb3-b84a-1fec0938e749 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.305312] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4781aeeb-3790-42da-a34d-5a9a5cc606a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.340022] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638a0b1b-ed81-474e-b534-3b7610a340c2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.347074] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05405bee-d768-44c4-b49d-b52708c20376 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.364578] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.375019] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.408094] env[61440]: DEBUG 
nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 799.408094] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.050s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.957585] env[61440]: DEBUG nova.compute.manager [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Received event network-changed-d1fa390a-e19b-4320-b080-e425f5a1b847 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 799.957706] env[61440]: DEBUG nova.compute.manager [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Refreshing instance network info cache due to event network-changed-d1fa390a-e19b-4320-b080-e425f5a1b847. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 799.957923] env[61440]: DEBUG oslo_concurrency.lockutils [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] Acquiring lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.958083] env[61440]: DEBUG oslo_concurrency.lockutils [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] Acquired lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.958252] env[61440]: DEBUG nova.network.neutron [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Refreshing network info cache for port d1fa390a-e19b-4320-b080-e425f5a1b847 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 800.374209] env[61440]: DEBUG nova.network.neutron [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Updated VIF entry in instance network info cache for port d1fa390a-e19b-4320-b080-e425f5a1b847. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 800.374564] env[61440]: DEBUG nova.network.neutron [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Updating instance_info_cache with network_info: [{"id": "d1fa390a-e19b-4320-b080-e425f5a1b847", "address": "fa:16:3e:00:b4:cf", "network": {"id": "4e1ce17c-5cf1-4b72-9f25-c702529fa569", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1185919493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab322726cca4a07978c53c412fa7ee9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1fa390a-e1", "ovs_interfaceid": "d1fa390a-e19b-4320-b080-e425f5a1b847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.394039] env[61440]: DEBUG oslo_concurrency.lockutils [req-842d7d43-14f8-4812-ba1d-c0be8cab4b99 req-60e92ccf-0e39-40d4-8885-836cbbdec8f3 service nova] Releasing lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.775549] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.775855] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.176290] env[61440]: DEBUG oslo_concurrency.lockutils [None req-71f29705-800a-439b-9298-ea61a62528c3 tempest-ServerRescueTestJSONUnderV235-1738786825 tempest-ServerRescueTestJSONUnderV235-1738786825-project-member] Acquiring lock "f0ad8a9b-780b-4714-8a33-d92b922cb143" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.176579] env[61440]: DEBUG oslo_concurrency.lockutils [None req-71f29705-800a-439b-9298-ea61a62528c3 tempest-ServerRescueTestJSONUnderV235-1738786825 
tempest-ServerRescueTestJSONUnderV235-1738786825-project-member] Lock "f0ad8a9b-780b-4714-8a33-d92b922cb143" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.620698] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e7e0c30e-1cd1-4e60-a00d-f42c447ba2df tempest-ServerPasswordTestJSON-1978182123 tempest-ServerPasswordTestJSON-1978182123-project-member] Acquiring lock "1a610510-68a0-45ca-aeae-c07b28f14e8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.621010] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e7e0c30e-1cd1-4e60-a00d-f42c447ba2df tempest-ServerPasswordTestJSON-1978182123 tempest-ServerPasswordTestJSON-1978182123-project-member] Lock "1a610510-68a0-45ca-aeae-c07b28f14e8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.404329] env[61440]: DEBUG oslo_concurrency.lockutils [None req-752a1a01-be36-40c0-8893-c4fe132c8ba2 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] Acquiring lock "0fd3612a-5309-403b-b853-599e731667cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.404578] env[61440]: DEBUG oslo_concurrency.lockutils [None req-752a1a01-be36-40c0-8893-c4fe132c8ba2 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] Lock "0fd3612a-5309-403b-b853-599e731667cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.606197] env[61440]: DEBUG oslo_concurrency.lockutils [None req-083612fe-e426-4e04-a493-7b26e8d86448 tempest-ServerShowV257Test-808608694 tempest-ServerShowV257Test-808608694-project-member] Acquiring lock "108d2f3d-2f7e-4757-a78f-77ccc82d831a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.606484] env[61440]: DEBUG oslo_concurrency.lockutils [None req-083612fe-e426-4e04-a493-7b26e8d86448 tempest-ServerShowV257Test-808608694 tempest-ServerShowV257Test-808608694-project-member] Lock "108d2f3d-2f7e-4757-a78f-77ccc82d831a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.465089] env[61440]: DEBUG oslo_concurrency.lockutils [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Acquiring lock "a757fd04-c309-4b79-ab13-47b70b97b79c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.465355] env[61440]: DEBUG oslo_concurrency.lockutils [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "a757fd04-c309-4b79-ab13-47b70b97b79c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.492947] env[61440]: DEBUG oslo_concurrency.lockutils [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Acquiring lock "e4a748bc-83c2-451a-bed4-f3534a649731" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.493145] env[61440]: DEBUG oslo_concurrency.lockutils [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "e4a748bc-83c2-451a-bed4-f3534a649731" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.999586] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a8ca43b6-2ce5-4bf7-b54c-2098920532f5 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "d49a7f3b-ead2-4933-9d7f-3a80ce34e306" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.999810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a8ca43b6-2ce5-4bf7-b54c-2098920532f5 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "d49a7f3b-ead2-4933-9d7f-3a80ce34e306" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.884819] env[61440]: DEBUG oslo_concurrency.lockutils [None req-30a26ca6-0f67-4d45-a750-7d8371a43784 tempest-ServerActionsTestOtherB-1257657847 tempest-ServerActionsTestOtherB-1257657847-project-member] Acquiring lock "cb538de5-4247-4096-93c3-039ea0081985" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.885121] env[61440]: DEBUG oslo_concurrency.lockutils [None req-30a26ca6-0f67-4d45-a750-7d8371a43784 tempest-ServerActionsTestOtherB-1257657847 tempest-ServerActionsTestOtherB-1257657847-project-member] Lock "cb538de5-4247-4096-93c3-039ea0081985" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.350166] env[61440]: WARNING oslo_vmware.rw_handles [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 
tempest-DeleteServersAdminTestJSON-1675151757-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 839.350166] env[61440]: ERROR oslo_vmware.rw_handles [ 839.350926] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 839.352300] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 839.352608] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Copying Virtual Disk [datastore2] vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/f6301821-072f-43a3-8f9b-00ea9b603b6c/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 839.352829] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7801d93b-bb1c-4f7a-ba20-7e089731b693 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.361571] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Waiting for the task: (returnval){ [ 839.361571] env[61440]: value = "task-4281266" [ 839.361571] env[61440]: _type = "Task" [ 839.361571] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.369147] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Task: {'id': task-4281266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.875299] env[61440]: DEBUG oslo_vmware.exceptions [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 839.875299] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.875933] env[61440]: ERROR nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 839.875933] env[61440]: Faults: ['InvalidArgument'] [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Traceback (most recent call last): [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] yield resources [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self.driver.spawn(context, instance, image_meta, [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self._fetch_image_if_missing(context, vi) [ 839.875933] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] image_cache(vi, tmp_image_ds_loc) [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: 
f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] vm_util.copy_virtual_disk( [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] session._wait_for_task(vmdk_copy_task) [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] return self.wait_for_task(task_ref) [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] return evt.wait() [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] result = hub.switch() [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 839.876483] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] return self.greenlet.switch() [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self.f(*self.args, **self.kw) [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] raise exceptions.translate_fault(task_info.error) [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Faults: ['InvalidArgument'] [ 839.876959] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] [ 839.878937] env[61440]: INFO nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Terminating instance [ 839.879802] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.880533] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.880533] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-243acc1e-b829-4d12-85ce-6dca74fd2a52 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.883053] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 839.883482] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 839.886022] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d0c816-da4b-45ec-960a-07306a8a4a4a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.891730] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 839.892209] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc3e9bdf-e8ed-447b-a665-2d6770b9d226 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.897023] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.897023] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 839.897023] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7336d2f3-0bb2-4865-958a-170615ba3208 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.901703] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Waiting for the task: (returnval){ [ 839.901703] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52385e4d-4d81-1a12-93ad-24f136af4df0" [ 839.901703] env[61440]: _type = "Task" [ 839.901703] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.912151] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52385e4d-4d81-1a12-93ad-24f136af4df0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.974735] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 839.975017] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 839.975251] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Deleting the datastore file [datastore2] f59b0b04-643c-497c-90a0-a7f885c1eb3b {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.975564] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f213a1ca-898a-4c6c-a36e-6f015182e09a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.982183] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Waiting for the task: (returnval){ [ 839.982183] env[61440]: value = "task-4281268" [ 839.982183] env[61440]: _type = "Task" [ 839.982183] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.990143] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Task: {'id': task-4281268, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.411908] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 840.412227] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Creating directory with path [datastore2] vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.412473] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cb9bca3-0f29-4342-bf40-771a9313fdf0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.423688] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Created directory with path [datastore2] vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.423743] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Fetch image to [datastore2] vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 840.423916] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 840.424628] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7f583b-171f-41b3-a0fc-7d2a7b8854db {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.431942] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581faee2-97e6-47a2-afb9-f8b2e49b3184 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.440899] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52733b1-ac88-4da6-97e4-bf4d0c81bf55 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.470435] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-30a982a7-983d-4884-b952-ee17e47a8f1f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.476325] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-96f15cfa-58f5-4aec-a120-43a56569a936 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.490515] env[61440]: DEBUG oslo_vmware.api [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Task: {'id': task-4281268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068808} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.490932] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.490932] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 840.491160] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 840.491313] env[61440]: INFO nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Took 0.61 seconds to destroy the instance on the hypervisor. 
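The "Waiting for the task ... to complete", "progress is 0%", and "completed successfully" entries above all come from oslo_vmware's task-polling loop. As a rough, illustrative sketch only (the real logic lives in oslo_vmware.api.VMwareAPISession.wait_for_task; the fetch_task_info callable below is a hypothetical stand-in for the PropertyCollector round-trip that reads the task's TaskInfo), the pattern is:

import time
from typing import Callable

def wait_for_task(fetch_task_info: Callable[[], dict],
                  poll_interval: float = 0.5) -> dict:
    """Poll a vSphere task until it reaches a terminal state."""
    while True:
        info = fetch_task_info()            # one RetrievePropertiesEx round-trip
        if info["state"] == "success":      # e.g. the DeleteDatastoreFile_Task above
            return info
        if info["state"] == "error":
            # Faults such as InvalidArgument surface here, matching the
            # VimFaultException traceback later in this log.
            raise RuntimeError(info["error"])
        # Each "progress is N%" line corresponds to one iteration like this.
        time.sleep(poll_interval)

In the log, each iteration shows up as a _poll_task DEBUG line, and the terminal state either returns the TaskInfo (with duration_secs) or raises the translated fault.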
[ 840.493532] env[61440]: DEBUG nova.compute.claims [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 840.493709] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.493925] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.500168] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 840.634136] env[61440]: DEBUG oslo_vmware.rw_handles [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 840.693845] env[61440]: DEBUG oslo_vmware.rw_handles [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 840.694054] env[61440]: DEBUG oslo_vmware.rw_handles [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 840.908206] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca4d863-748e-4c2b-8e74-29b01df3567b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.915504] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5390405b-960b-4c7e-b3f1-aae245f495b2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.944663] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3233f1d3-7b4d-4419-bbbc-d452ebc883e0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.951955] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdcc205-be8c-425c-afcb-44addbae06c1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.963826] env[61440]: DEBUG nova.compute.provider_tree [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.972714] env[61440]: DEBUG nova.scheduler.client.report [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.990232] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.496s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.990800] env[61440]: ERROR nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 840.990800] env[61440]: Faults: ['InvalidArgument'] [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Traceback (most recent call last): [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self.driver.spawn(context, instance, image_meta, [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self._fetch_image_if_missing(context, vi) [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] image_cache(vi, tmp_image_ds_loc) [ 840.990800] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] vm_util.copy_virtual_disk( [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] session._wait_for_task(vmdk_copy_task) [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] return self.wait_for_task(task_ref) [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] return evt.wait() [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] result = hub.switch() [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] return self.greenlet.switch() [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 840.991241] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] self.f(*self.args, **self.kw) [ 840.991665] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 840.991665] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] raise exceptions.translate_fault(task_info.error) [ 840.991665] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 840.991665] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Faults: ['InvalidArgument'] [ 840.991665] env[61440]: ERROR nova.compute.manager [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] [ 840.991665] env[61440]: DEBUG nova.compute.utils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 840.992896] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Build of instance f59b0b04-643c-497c-90a0-a7f885c1eb3b was re-scheduled: A specified parameter was not correct: fileType [ 840.992896] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 840.993286] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 840.993459] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 840.993616] env[61440]: DEBUG nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 840.993781] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 841.421688] env[61440]: DEBUG nova.network.neutron [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.433355] env[61440]: INFO nova.compute.manager [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Took 0.44 seconds to deallocate network for instance. [ 841.563315] env[61440]: INFO nova.scheduler.client.report [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Deleted allocations for instance f59b0b04-643c-497c-90a0-a7f885c1eb3b [ 841.589125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6c05c01-417b-4f84-ba91-a97d099029e6 tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 298.494s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.589125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 295.972s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.589125] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] During sync_power_state the instance has a pending task (spawning). Skip. 
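The "acquired ... waited N s" / "released ... held N s" bookkeeping in the surrounding lockutils entries is worth unpacking: waited measures time blocked before acquiring the named lock, held measures time inside the critical section. A minimal sketch that mimics this timing/logging behaviour (oslo_concurrency.lockutils.lock() is the real implementation; this decorator is only an approximation for illustration):

import threading
import time
from functools import wraps

_locks: dict[str, threading.Lock] = {}

def synchronized(name: str):
    """Serialize callers on a named lock and log waited/held times."""
    lock = _locks.setdefault(name, threading.Lock())
    def decorator(fn):
        @wraps(fn)
        def inner(*args, **kwargs):
            t0 = time.monotonic()
            with lock:
                waited = time.monotonic() - t0
                print(f'Lock "{name}" acquired by "{fn.__qualname__}" :: '
                      f'waited {waited:.3f}s')
                t1 = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    held = time.monotonic() - t1
                    print(f'Lock "{name}" "released" by "{fn.__qualname__}" :: '
                          f'held {held:.3f}s')
        return inner
    return decorator

Read this way, the long waits above (e.g. waited 295.972s on "f59b0b04-...") simply reflect other code paths holding the same per-instance lock while the failed build was being re-scheduled and torn down.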
[ 841.589125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.589330] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 99.757s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.590193] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.590193] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.590193] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.594594] env[61440]: INFO nova.compute.manager [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Terminating instance [ 841.596919] env[61440]: DEBUG nova.compute.manager [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 841.597248] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 841.597855] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9687609e-1f6e-4dc6-bf7d-f980b0adbccd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.600635] env[61440]: DEBUG nova.compute.manager [None req-51652501-6bb5-4da5-957a-7388dbb48d10 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 3ca84343-76bb-46f4-89d8-3cc45ac3dc0b] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 841.610106] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881f36d2-c77e-44f3-8172-bdc50c4e6665 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.627172] env[61440]: DEBUG nova.compute.manager [None req-51652501-6bb5-4da5-957a-7388dbb48d10 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 3ca84343-76bb-46f4-89d8-3cc45ac3dc0b] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 841.640445] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f59b0b04-643c-497c-90a0-a7f885c1eb3b could not be found. [ 841.640611] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 841.640825] env[61440]: INFO nova.compute.manager [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 841.641132] env[61440]: DEBUG oslo.service.loopingcall [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.641851] env[61440]: DEBUG nova.compute.manager [-] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 841.641987] env[61440]: DEBUG nova.network.neutron [-] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 841.655439] env[61440]: DEBUG oslo_concurrency.lockutils [None req-51652501-6bb5-4da5-957a-7388dbb48d10 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "3ca84343-76bb-46f4-89d8-3cc45ac3dc0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.982s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.670114] env[61440]: DEBUG nova.network.neutron [-] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.671314] env[61440]: DEBUG nova.compute.manager [None req-10e5627c-e436-4981-8b36-ede63ef21c48 tempest-ServersV294TestFqdnHostnames-1865455608 tempest-ServersV294TestFqdnHostnames-1865455608-project-member] [instance: c737971c-735e-4317-b0e6-eb73bfc8456a] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 841.679059] env[61440]: INFO nova.compute.manager [-] [instance: f59b0b04-643c-497c-90a0-a7f885c1eb3b] Took 0.04 seconds to deallocate network for instance. [ 841.696685] env[61440]: DEBUG nova.compute.manager [None req-10e5627c-e436-4981-8b36-ede63ef21c48 tempest-ServersV294TestFqdnHostnames-1865455608 tempest-ServersV294TestFqdnHostnames-1865455608-project-member] [instance: c737971c-735e-4317-b0e6-eb73bfc8456a] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 841.718076] env[61440]: DEBUG oslo_concurrency.lockutils [None req-10e5627c-e436-4981-8b36-ede63ef21c48 tempest-ServersV294TestFqdnHostnames-1865455608 tempest-ServersV294TestFqdnHostnames-1865455608-project-member] Lock "c737971c-735e-4317-b0e6-eb73bfc8456a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.635s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.728932] env[61440]: DEBUG nova.compute.manager [None req-e60257bf-cdf4-4972-8f30-67ecfdeb3ac7 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] [instance: 61c5fe8a-8fab-4fca-b03e-b583b2566162] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 841.754515] env[61440]: DEBUG nova.compute.manager [None req-e60257bf-cdf4-4972-8f30-67ecfdeb3ac7 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] [instance: 61c5fe8a-8fab-4fca-b03e-b583b2566162] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 841.781060] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e60257bf-cdf4-4972-8f30-67ecfdeb3ac7 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] Lock "61c5fe8a-8fab-4fca-b03e-b583b2566162" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.972s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.786020] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f0b8f8-8413-412e-b7e7-944e18aac08a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "f59b0b04-643c-497c-90a0-a7f885c1eb3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.792137] env[61440]: DEBUG nova.compute.manager [None req-6e36ab58-1071-49b1-a512-c4f9e3e164ee tempest-ServersListShow296Test-1936214275 tempest-ServersListShow296Test-1936214275-project-member] [instance: a5eb3371-1899-44cb-ba30-44aa4d54b2ee] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 841.815950] env[61440]: DEBUG nova.compute.manager [None req-6e36ab58-1071-49b1-a512-c4f9e3e164ee tempest-ServersListShow296Test-1936214275 tempest-ServersListShow296Test-1936214275-project-member] [instance: a5eb3371-1899-44cb-ba30-44aa4d54b2ee] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 841.839102] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6e36ab58-1071-49b1-a512-c4f9e3e164ee tempest-ServersListShow296Test-1936214275 tempest-ServersListShow296Test-1936214275-project-member] Lock "a5eb3371-1899-44cb-ba30-44aa4d54b2ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.769s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.848337] env[61440]: DEBUG nova.compute.manager [None req-089f0975-389d-4a4d-9a66-eaaf638dacc0 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 697179bb-2391-4434-8144-ee917aa84441] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 841.873981] env[61440]: DEBUG nova.compute.manager [None req-089f0975-389d-4a4d-9a66-eaaf638dacc0 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 697179bb-2391-4434-8144-ee917aa84441] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 841.895772] env[61440]: DEBUG oslo_concurrency.lockutils [None req-089f0975-389d-4a4d-9a66-eaaf638dacc0 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "697179bb-2391-4434-8144-ee917aa84441" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.264s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.906283] env[61440]: DEBUG nova.compute.manager [None req-a05aa013-816e-4894-ba46-8a3babce2e95 tempest-ServersTestJSON-1279785354 tempest-ServersTestJSON-1279785354-project-member] [instance: d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 841.934792] env[61440]: DEBUG nova.compute.manager [None req-a05aa013-816e-4894-ba46-8a3babce2e95 tempest-ServersTestJSON-1279785354 tempest-ServersTestJSON-1279785354-project-member] [instance: d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 841.958294] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a05aa013-816e-4894-ba46-8a3babce2e95 tempest-ServersTestJSON-1279785354 tempest-ServersTestJSON-1279785354-project-member] Lock "d2056fc4-ff5c-4c9f-ad5c-d0ecb466f618" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.741s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.967342] env[61440]: DEBUG nova.compute.manager [None req-1544c9f0-d684-41d6-b3e8-7a0183e9daa8 tempest-ServersTestManualDisk-1353772146 tempest-ServersTestManualDisk-1353772146-project-member] [instance: a91235be-0b5f-4b2a-8da3-569ec393305a] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 842.003096] env[61440]: DEBUG nova.compute.manager [None req-1544c9f0-d684-41d6-b3e8-7a0183e9daa8 tempest-ServersTestManualDisk-1353772146 tempest-ServersTestManualDisk-1353772146-project-member] [instance: a91235be-0b5f-4b2a-8da3-569ec393305a] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 842.024322] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1544c9f0-d684-41d6-b3e8-7a0183e9daa8 tempest-ServersTestManualDisk-1353772146 tempest-ServersTestManualDisk-1353772146-project-member] Lock "a91235be-0b5f-4b2a-8da3-569ec393305a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.859s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.033624] env[61440]: DEBUG nova.compute.manager [None req-5ffe4c69-5518-43d6-8330-826943160e12 tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] [instance: f0ad826d-58d7-4a52-8767-4609170d964d] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 842.055809] env[61440]: DEBUG nova.compute.manager [None req-5ffe4c69-5518-43d6-8330-826943160e12 tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] [instance: f0ad826d-58d7-4a52-8767-4609170d964d] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 842.076575] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5ffe4c69-5518-43d6-8330-826943160e12 tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] Lock "f0ad826d-58d7-4a52-8767-4609170d964d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.937s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.084579] env[61440]: DEBUG nova.compute.manager [None req-2e5ab2e1-51a6-496e-9728-cdebe0110fef tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] [instance: d8cc1718-c721-478b-807a-d6ae1eb09c7c] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 842.111315] env[61440]: DEBUG nova.compute.manager [None req-2e5ab2e1-51a6-496e-9728-cdebe0110fef tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] [instance: d8cc1718-c721-478b-807a-d6ae1eb09c7c] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 842.133638] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e5ab2e1-51a6-496e-9728-cdebe0110fef tempest-ServerRescueNegativeTestJSON-371561480 tempest-ServerRescueNegativeTestJSON-371561480-project-member] Lock "d8cc1718-c721-478b-807a-d6ae1eb09c7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.825s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.143975] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 842.197309] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.197566] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.199191] env[61440]: INFO nova.compute.claims [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.546642] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6478e7ff-d80a-41eb-8f71-f0e4ba52806a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Acquiring lock "9866ff2f-53c3-4ed2-865f-d418b0541025" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.546914] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6478e7ff-d80a-41eb-8f71-f0e4ba52806a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "9866ff2f-53c3-4ed2-865f-d418b0541025" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.609428] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f887a3e-7999-42b7-ad82-732872d05d59 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.617893] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827271cc-43bb-461a-8d77-9e68c6898483 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.648669] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712a3782-e3ff-4db0-8b22-d4b80a758d14 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.656105] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a47cb6-1ee5-427a-b178-2297109f6945 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.669096] env[61440]: DEBUG nova.compute.provider_tree [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Inventory has not changed in ProviderTree for 
provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.678308] env[61440]: DEBUG nova.scheduler.client.report [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 842.692106] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.494s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.692106] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 842.728751] env[61440]: DEBUG nova.compute.utils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 842.730477] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 842.731242] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 842.752316] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 842.827374] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 842.834386] env[61440]: DEBUG nova.policy [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ac928add11a4682b20e8cd0d7d44af4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '939f35c7f15c494eb37717105be409b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 842.853016] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 842.853293] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 842.853450] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.853632] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 842.853779] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.853929] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 842.854157] env[61440]: DEBUG nova.virt.hardware 
[None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 842.854316] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 842.854484] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 842.854646] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 842.854824] env[61440]: DEBUG nova.virt.hardware [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 842.855780] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9d7761-22b8-4036-a0d5-1a5ca85bcb89 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.864961] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd7c23e-4803-4c1d-89d4-ea8284c992a3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.277030] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Successfully created port: f342e9ed-b9a1-481f-934b-0bfd4c9246a2 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.045209] env[61440]: DEBUG nova.compute.manager [req-1cd23e5b-c974-4afb-96f0-ee0e5f1ff48c req-15dfb8a9-d8e5-491f-9073-9cd2f5e0dc42 service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Received event network-vif-plugged-f342e9ed-b9a1-481f-934b-0bfd4c9246a2 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 844.045209] env[61440]: DEBUG oslo_concurrency.lockutils [req-1cd23e5b-c974-4afb-96f0-ee0e5f1ff48c req-15dfb8a9-d8e5-491f-9073-9cd2f5e0dc42 service nova] Acquiring lock "23b7562f-035c-487f-a1f2-279b69ca4355-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.045209] env[61440]: DEBUG oslo_concurrency.lockutils [req-1cd23e5b-c974-4afb-96f0-ee0e5f1ff48c req-15dfb8a9-d8e5-491f-9073-9cd2f5e0dc42 service 
nova] Lock "23b7562f-035c-487f-a1f2-279b69ca4355-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.045560] env[61440]: DEBUG oslo_concurrency.lockutils [req-1cd23e5b-c974-4afb-96f0-ee0e5f1ff48c req-15dfb8a9-d8e5-491f-9073-9cd2f5e0dc42 service nova] Lock "23b7562f-035c-487f-a1f2-279b69ca4355-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.045560] env[61440]: DEBUG nova.compute.manager [req-1cd23e5b-c974-4afb-96f0-ee0e5f1ff48c req-15dfb8a9-d8e5-491f-9073-9cd2f5e0dc42 service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] No waiting events found dispatching network-vif-plugged-f342e9ed-b9a1-481f-934b-0bfd4c9246a2 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 844.045636] env[61440]: WARNING nova.compute.manager [req-1cd23e5b-c974-4afb-96f0-ee0e5f1ff48c req-15dfb8a9-d8e5-491f-9073-9cd2f5e0dc42 service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Received unexpected event network-vif-plugged-f342e9ed-b9a1-481f-934b-0bfd4c9246a2 for instance with vm_state building and task_state spawning. [ 844.185311] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Successfully updated port: f342e9ed-b9a1-481f-934b-0bfd4c9246a2 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.199541] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "refresh_cache-23b7562f-035c-487f-a1f2-279b69ca4355" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.199706] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquired lock "refresh_cache-23b7562f-035c-487f-a1f2-279b69ca4355" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.199879] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 844.246869] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 844.701316] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Updating instance_info_cache with network_info: [{"id": "f342e9ed-b9a1-481f-934b-0bfd4c9246a2", "address": "fa:16:3e:75:5c:4d", "network": {"id": "e97e1218-c874-4c28-95cb-0ea25ddad373", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-112770200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "939f35c7f15c494eb37717105be409b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf342e9ed-b9", "ovs_interfaceid": "f342e9ed-b9a1-481f-934b-0bfd4c9246a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.724024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Releasing lock "refresh_cache-23b7562f-035c-487f-a1f2-279b69ca4355" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.724024] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance network_info: |[{"id": "f342e9ed-b9a1-481f-934b-0bfd4c9246a2", "address": "fa:16:3e:75:5c:4d", "network": {"id": "e97e1218-c874-4c28-95cb-0ea25ddad373", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-112770200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "939f35c7f15c494eb37717105be409b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf342e9ed-b9", "ovs_interfaceid": "f342e9ed-b9a1-481f-934b-0bfd4c9246a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 844.724309] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:5c:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f342e9ed-b9a1-481f-934b-0bfd4c9246a2', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.730728] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Creating folder: Project (939f35c7f15c494eb37717105be409b5). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 844.731381] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47ca739f-f1f0-4f71-a2a4-71a2bb05415c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.743732] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Created folder: Project (939f35c7f15c494eb37717105be409b5) in parent group-v843372. [ 844.743732] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Creating folder: Instances. Parent ref: group-v843420. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 844.743732] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d412d43e-6fae-4d0d-a0d0-58617b249389 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.752244] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Created folder: Instances in parent group-v843420. [ 844.752480] env[61440]: DEBUG oslo.service.loopingcall [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.752656] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 844.752853] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44358dd7-26b1-4ad8-a6e8-025a2e292612 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.773025] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.773025] env[61440]: value = "task-4281271" [ 844.773025] env[61440]: _type = "Task" [ 844.773025] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.778695] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281271, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.282408] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281271, 'name': CreateVM_Task, 'duration_secs': 0.281593} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.282408] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 845.283038] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.283174] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.283556] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.283841] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f901494a-693e-4b4f-9804-2ac376a82d03 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.288457] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Waiting for the task: (returnval){ [ 845.288457] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520c602f-7fff-1144-f553-c8de06ea8bfe" [ 845.288457] env[61440]: _type = "Task" [ 845.288457] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.296427] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520c602f-7fff-1144-f553-c8de06ea8bfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.799211] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.799530] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.799813] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.127220] env[61440]: DEBUG nova.compute.manager [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Received event network-changed-f342e9ed-b9a1-481f-934b-0bfd4c9246a2 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 846.127220] env[61440]: DEBUG nova.compute.manager [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Refreshing instance network info cache due to event network-changed-f342e9ed-b9a1-481f-934b-0bfd4c9246a2. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 846.127533] env[61440]: DEBUG oslo_concurrency.lockutils [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] Acquiring lock "refresh_cache-23b7562f-035c-487f-a1f2-279b69ca4355" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.127533] env[61440]: DEBUG oslo_concurrency.lockutils [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] Acquired lock "refresh_cache-23b7562f-035c-487f-a1f2-279b69ca4355" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.127533] env[61440]: DEBUG nova.network.neutron [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Refreshing network info cache for port f342e9ed-b9a1-481f-934b-0bfd4c9246a2 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 846.494993] env[61440]: DEBUG nova.network.neutron [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Updated VIF entry in instance network info cache for port f342e9ed-b9a1-481f-934b-0bfd4c9246a2. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 846.494993] env[61440]: DEBUG nova.network.neutron [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Updating instance_info_cache with network_info: [{"id": "f342e9ed-b9a1-481f-934b-0bfd4c9246a2", "address": "fa:16:3e:75:5c:4d", "network": {"id": "e97e1218-c874-4c28-95cb-0ea25ddad373", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-112770200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "939f35c7f15c494eb37717105be409b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf342e9ed-b9", "ovs_interfaceid": "f342e9ed-b9a1-481f-934b-0bfd4c9246a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.504492] env[61440]: DEBUG oslo_concurrency.lockutils [req-7dea6f54-97ff-4348-85e0-e48dc86eb81d req-6e553fc8-8639-4ca7-904e-9f015a7c54ff service nova] Releasing lock "refresh_cache-23b7562f-035c-487f-a1f2-279b69ca4355" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.796195] env[61440]: DEBUG oslo_concurrency.lockutils [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "23b7562f-035c-487f-a1f2-279b69ca4355" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.409449] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.409791] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 855.409791] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 855.431898] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432066] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432255] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432333] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432468] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432631] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432761] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.432883] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.433009] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.433142] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 855.433262] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 855.433707] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.433895] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.434066] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 856.274597] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.274519] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.274821] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.274910] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.271227] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.295594] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.274145] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.285779] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.285996] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.286180] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.286335] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 860.287424] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa8ae4a-dc70-469d-a632-3c65b558dc53 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.296862] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706d20b1-656e-4864-b77f-cfe5d7b2ec18 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.310592] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b22a0c-2964-4a03-a2be-16a736d0fb6b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.316986] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4845e801-cb93-419d-8036-99d57500c268 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.344937] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180668MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 860.345125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.345300] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.420828] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421054] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421230] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421361] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421485] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421603] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421721] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421838] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.421952] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.422081] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 860.433562] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.443852] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.454707] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.464800] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f455e5bd-301f-4b08-8d41-41c969ace4f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.476976] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4358437a-d336-44d2-b069-60b4992adc77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.487115] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20acf443-e7c3-43c8-8203-23a257532c13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.497171] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e3447d16-79a6-4b5a-bdc0-f148276b48f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.507683] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.517770] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f0ad8a9b-780b-4714-8a33-d92b922cb143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.530266] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1a610510-68a0-45ca-aeae-c07b28f14e8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.540265] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0fd3612a-5309-403b-b853-599e731667cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.551198] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 108d2f3d-2f7e-4757-a78f-77ccc82d831a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.560651] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a757fd04-c309-4b79-ab13-47b70b97b79c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.573442] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e4a748bc-83c2-451a-bed4-f3534a649731 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.583024] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d49a7f3b-ead2-4933-9d7f-3a80ce34e306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.593156] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance cb538de5-4247-4096-93c3-039ea0081985 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.602734] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9866ff2f-53c3-4ed2-865f-d418b0541025 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.602976] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 860.603136] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 860.893248] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199e9cc7-6143-4787-a8d7-4da67f6f244a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.901143] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107e18c4-c608-4a5f-9ece-91de6cfe7701 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.930716] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6d6198-6a33-4c69-a7f7-aa0c1d9402fe {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.937806] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc01bef-b832-4446-90d1-4472bdeb141d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.950622] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.958973] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.973716] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 860.973716] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.628s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.208460] env[61440]: WARNING oslo_vmware.rw_handles [None 
req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 890.208460] env[61440]: ERROR oslo_vmware.rw_handles [ 890.209187] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 890.210723] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 890.210975] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Copying Virtual Disk [datastore2] vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/34330b11-be64-47f9-8908-ca18cc6124a9/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 890.211259] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c21c5475-af95-4e64-8070-f1ac2fdcd99c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.218764] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Waiting for the task: (returnval){ [ 890.218764] env[61440]: value = "task-4281272" [ 890.218764] env[61440]: _type = "Task" [ 890.218764] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.226396] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Task: {'id': task-4281272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.732717] env[61440]: DEBUG oslo_vmware.exceptions [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 890.733137] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.733903] env[61440]: ERROR nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 890.733903] env[61440]: Faults: ['InvalidArgument'] [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Traceback (most recent call last): [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] yield resources [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self.driver.spawn(context, instance, image_meta, [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self._fetch_image_if_missing(context, vi) [ 890.733903] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] image_cache(vi, tmp_image_ds_loc) [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: 
fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] vm_util.copy_virtual_disk( [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] session._wait_for_task(vmdk_copy_task) [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] return self.wait_for_task(task_ref) [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] return evt.wait() [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] result = hub.switch() [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 890.734356] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] return self.greenlet.switch() [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self.f(*self.args, **self.kw) [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] raise exceptions.translate_fault(task_info.error) [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Faults: ['InvalidArgument'] [ 890.734799] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] [ 890.734799] env[61440]: INFO nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Terminating instance [ 890.736623] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.736909] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.737242] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c10ea5f-c6f1-4b87-aaf8-128f1e0f0130 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.740256] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 890.740519] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.741664] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d87b4d-ec69-48c8-a264-de380adea191 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.750779] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 890.752152] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f94178a-f388-48a8-8b4b-7c7cfe202961 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.754016] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.754267] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 890.755008] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2fee9a5-9dc9-47d4-ab1c-1f845b91b356 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.760230] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Waiting for the task: (returnval){ [ 890.760230] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52c068ed-f58b-c35f-8163-5d85d6a9460f" [ 890.760230] env[61440]: _type = "Task" [ 890.760230] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.772326] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52c068ed-f58b-c35f-8163-5d85d6a9460f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.836092] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 890.836704] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 890.836948] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Deleting the datastore file [datastore2] fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.837242] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c8484c6-4dea-46ef-a319-029d4ff8e07d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.844778] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Waiting for the task: (returnval){ [ 890.844778] env[61440]: value = "task-4281274" [ 890.844778] env[61440]: _type = "Task" [ 890.844778] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.852542] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Task: {'id': task-4281274, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.270732] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 891.271077] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Creating directory with path [datastore2] vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.271234] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c289fe8-9454-403d-824b-19ac33f28042 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.283057] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Created directory with path [datastore2] vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.283057] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Fetch image to [datastore2] vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 891.283057] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 891.283711] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dbd0cc-64d8-4f61-a482-ee3b778e7bc1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.290066] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93702fa9-66ab-47ce-ae46-9e8869fd0166 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.299742] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43fc4cc-c277-453f-8818-da26c599ebef {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.329285] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b72641e-4ef2-4dc9-9d9a-b93f3163282a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.334643] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dccb7e2c-28ef-4332-af0e-0de87fdec815 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.352628] env[61440]: DEBUG oslo_vmware.api [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Task: {'id': task-4281274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06636} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.352917] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.353056] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 891.353235] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 891.353410] env[61440]: INFO nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Took 0.61 seconds to destroy the instance on the hypervisor. 
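[editor's note] The DeleteDatastoreFile_Task above follows the same wait pattern every vCenter call in this log uses: Nova invokes the SOAP method, gets back a Task reference, and oslo.vmware polls the task's info until it reaches success or error — the "progress is 0%" lines come from that poll loop (_poll_task in oslo_vmware/api.py per the trailers above), "duration_secs" is reported on completion, and a failed task is translated into a fault, as in the InvalidArgument traceback at [ 890.732717]. A minimal sketch of such a polling loop, with a hypothetical get_task_info callable and TaskInfo class standing in for the real vSphere bindings:

    import time
    from dataclasses import dataclass
    from typing import Optional

    # Hypothetical stand-in for the vSphere TaskInfo object; the real
    # driver reads the same fields (state, progress, error) off the
    # SOAP response.
    @dataclass
    class TaskInfo:
        state: str            # "queued" | "running" | "success" | "error"
        progress: int = 0
        error: Optional[str] = None

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds or fails.

        get_task_info is a hypothetical callable returning a TaskInfo;
        oslo.vmware drives the equivalent loop from a green thread and
        raises a translated fault on error.
        """
        started = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == "success":
                # Elapsed time is what the log reports as duration_secs.
                return time.monotonic() - started
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            if time.monotonic() - started > timeout:
                raise TimeoutError("task did not complete in time")
            # Matches the "progress is N%" debug lines emitted while waiting.
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)

In the trace above, task-4281274 goes from "progress is 0%" to completed with duration_secs 0.06636 via exactly this kind of loop, while task-4281272 takes the error branch and surfaces as the VimFaultException ("A specified parameter was not correct: fileType") seen at [ 890.733903].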
[ 891.355631] env[61440]: DEBUG nova.compute.claims [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 891.355818] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.356044] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.425685] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 891.497210] env[61440]: DEBUG oslo_vmware.rw_handles [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 891.559184] env[61440]: DEBUG oslo_vmware.rw_handles [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 891.559477] env[61440]: DEBUG oslo_vmware.rw_handles [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 891.797011] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a6a23f-c464-4d63-9b06-131e44b6970b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.804782] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674fb0e7-261c-4cc3-aeb3-9a18adcdfd3b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.834312] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2795050-8e99-4b61-9dae-6668d645b68c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.841020] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c7597f-3ed8-4823-a1a0-c292ffbfce3b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.853675] env[61440]: DEBUG nova.compute.provider_tree [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.862341] env[61440]: DEBUG nova.scheduler.client.report [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.876919] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.521s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.877453] env[61440]: ERROR nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 891.877453] env[61440]: Faults: ['InvalidArgument'] [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Traceback (most recent call last): [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 891.877453] env[61440]: ERROR 
nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self.driver.spawn(context, instance, image_meta, [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self._fetch_image_if_missing(context, vi) [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] image_cache(vi, tmp_image_ds_loc) [ 891.877453] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] vm_util.copy_virtual_disk( [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] session._wait_for_task(vmdk_copy_task) [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] return self.wait_for_task(task_ref) [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] return evt.wait() [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] result = hub.switch() [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] return self.greenlet.switch() [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 891.877872] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] self.f(*self.args, **self.kw) [ 891.878346] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 891.878346] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] raise exceptions.translate_fault(task_info.error) [ 891.878346] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 891.878346] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Faults: ['InvalidArgument'] [ 891.878346] env[61440]: ERROR nova.compute.manager [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] [ 891.878346] env[61440]: DEBUG nova.compute.utils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 891.879891] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Build of instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 was re-scheduled: A specified parameter was not correct: fileType [ 891.879891] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 891.880326] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 891.880548] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 891.880759] env[61440]: DEBUG nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 891.880960] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.335026] env[61440]: DEBUG nova.network.neutron [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.345934] env[61440]: INFO nova.compute.manager [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Took 0.46 seconds to deallocate network for instance. [ 892.456935] env[61440]: INFO nova.scheduler.client.report [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Deleted allocations for instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 [ 892.477254] env[61440]: DEBUG oslo_concurrency.lockutils [None req-14f11f77-96b2-420e-a802-017f75a5ccb5 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 346.304s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.478608] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 147.833s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.478788] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Acquiring lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.479022] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.479210] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.481397] env[61440]: INFO nova.compute.manager [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Terminating instance [ 892.487792] env[61440]: DEBUG nova.compute.manager [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 892.487992] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 892.488271] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-621ef616-86b0-4699-a2f5-f33e151ed0cb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.494543] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 892.500971] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ddfd59-5095-4e77-bdb3-24b85c006299 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.532643] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fb2c262b-17cf-44a2-a30f-a7fab3d6fe40 could not be found. 
[ 892.532854] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.533046] env[61440]: INFO nova.compute.manager [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Took 0.05 seconds to destroy the instance on the hypervisor. [ 892.533305] env[61440]: DEBUG oslo.service.loopingcall [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.536260] env[61440]: DEBUG nova.compute.manager [-] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 892.536367] env[61440]: DEBUG nova.network.neutron [-] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.558783] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.558857] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.560425] env[61440]: INFO nova.compute.claims [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.571779] env[61440]: DEBUG nova.network.neutron [-] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.607338] env[61440]: INFO nova.compute.manager [-] [instance: fb2c262b-17cf-44a2-a30f-a7fab3d6fe40] Took 0.07 seconds to deallocate network for instance. 
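The claim above succeeds against the inventory the report client prints just below (and earlier at [ 891.862341]). That dict is enough to reproduce the placement capacity check by hand: per resource class, allocatable capacity is (total - reserved) * allocation_ratio, while max_unit caps what any single allocation may claim. A worked check against the exact figures in this log:

    # Inventory as reported for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 180},
    }

    for rc, inv in inventory.items():
        # Placement's effective capacity formula.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} allocatable, <= {inv['max_unit']} per allocation")

    # VCPU: 192 allocatable, <= 16 per allocation
    # MEMORY_MB: 196078 allocatable, <= 65530 per allocation
    # DISK_GB: 329 allocatable, <= 180 per allocation

An m1.nano instance (1 vCPU, 128 MB RAM, 1 GB root disk, per the flavor dumped further down) fits easily within all three limits, which is why the claim on domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 is granted immediately.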
[ 892.706456] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ecbc78f0-96fa-4786-8e6e-2cd7de32e2f7 tempest-ServerExternalEventsTest-441374994 tempest-ServerExternalEventsTest-441374994-project-member] Lock "fb2c262b-17cf-44a2-a30f-a7fab3d6fe40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.228s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.967515] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b53f211-69e9-4702-a8b9-0e1896b1bf67 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.975170] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce81f0d2-d636-4731-a88c-ec5c27540c37 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.004623] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9ead8b-7996-4770-90ea-2eb13091d006 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.011533] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02e84d0-cb4e-4cb3-9c73-a89c445c4cda {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.024282] env[61440]: DEBUG nova.compute.provider_tree [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.033934] env[61440]: DEBUG nova.scheduler.client.report [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.051427] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.492s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.051918] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Start building networks asynchronously for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 893.084862] env[61440]: DEBUG nova.compute.utils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.090638] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 893.090638] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 893.096734] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 893.174169] env[61440]: DEBUG nova.policy [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc28d15d6cdf4cee8c76ebedd1e42ef0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4172b4c06664c728e1bd0ebe85041f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 893.193673] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 893.221872] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 893.222131] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 893.222291] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.222470] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 893.222692] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.222864] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 893.223088] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 893.223256] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 893.223422] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd 
tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 893.223585] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 893.223761] env[61440]: DEBUG nova.virt.hardware [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 893.224626] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1abfaa9-ffea-4d9a-b0bc-81ed79d1a2be {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.233654] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9cd4a5-b5f7-4e1e-986f-806225f872d1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.892348] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Successfully created port: 3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.796018] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Successfully updated port: 3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.815695] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "refresh_cache-b8a27ad2-4cc5-4219-9bc3-5735433b153c" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.815906] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquired lock "refresh_cache-b8a27ad2-4cc5-4219-9bc3-5735433b153c" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.815994] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.886260] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: 
b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.058135] env[61440]: DEBUG nova.compute.manager [req-9e2d8c6d-25f0-45ab-9285-3cf73316d309 req-4b0943da-551f-4228-b798-b4c2294851f5 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Received event network-vif-plugged-3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 898.058135] env[61440]: DEBUG oslo_concurrency.lockutils [req-9e2d8c6d-25f0-45ab-9285-3cf73316d309 req-4b0943da-551f-4228-b798-b4c2294851f5 service nova] Acquiring lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.058582] env[61440]: DEBUG oslo_concurrency.lockutils [req-9e2d8c6d-25f0-45ab-9285-3cf73316d309 req-4b0943da-551f-4228-b798-b4c2294851f5 service nova] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.058582] env[61440]: DEBUG oslo_concurrency.lockutils [req-9e2d8c6d-25f0-45ab-9285-3cf73316d309 req-4b0943da-551f-4228-b798-b4c2294851f5 service nova] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.058582] env[61440]: DEBUG nova.compute.manager [req-9e2d8c6d-25f0-45ab-9285-3cf73316d309 req-4b0943da-551f-4228-b798-b4c2294851f5 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] No waiting events found dispatching network-vif-plugged-3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 898.058724] env[61440]: WARNING nova.compute.manager [req-9e2d8c6d-25f0-45ab-9285-3cf73316d309 req-4b0943da-551f-4228-b798-b4c2294851f5 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Received unexpected event network-vif-plugged-3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf for instance with vm_state building and task_state spawning. 
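The records above are the external-event handshake in miniature: neutron delivers network-vif-plugged-3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf, the compute manager looks for a waiter registered for that instance and event, finds none ("No waiting events found"), and logs the event as unexpected because the instance is still building. A toy model of that pop-or-warn dispatch, assuming a plain dict of threading.Event objects in place of Nova's InstanceEvents bookkeeping:

    import threading

    # (instance_uuid, event-name-with-tag) -> waiter, guarded by _lock.
    _waiters: dict[tuple[str, str], threading.Event] = {}
    _lock = threading.Lock()


    def prepare_for_event(instance_uuid: str, event: str) -> threading.Event:
        # Called by the build path *before* it blocks waiting for neutron.
        ev = threading.Event()
        with _lock:
            _waiters[(instance_uuid, event)] = ev
        return ev


    def pop_instance_event(instance_uuid: str, event: str) -> None:
        # Called when an external event such as network-vif-plugged arrives.
        with _lock:
            ev = _waiters.pop((instance_uuid, event), None)
        if ev is None:
            # Matches the WARNING above: nobody registered interest yet, so
            # the event is recorded as unexpected and otherwise ignored.
            print(f"Received unexpected event {event} for instance {instance_uuid}")
        else:
            ev.set()  # unblocks the waiter in the build path

The warning is benign here: the spawn path simply had not registered a waiter before neutron finished wiring the port, and the build carries on normally in the records that follow.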
[ 898.126509] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Updating instance_info_cache with network_info: [{"id": "3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf", "address": "fa:16:3e:42:0a:3e", "network": {"id": "f164a53f-649f-4fb2-80dc-88ebeb34f5a1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2131785083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e4172b4c06664c728e1bd0ebe85041f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3acfb99f-d0", "ovs_interfaceid": "3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.142494] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Releasing lock "refresh_cache-b8a27ad2-4cc5-4219-9bc3-5735433b153c" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.142675] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance network_info: |[{"id": "3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf", "address": "fa:16:3e:42:0a:3e", "network": {"id": "f164a53f-649f-4fb2-80dc-88ebeb34f5a1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2131785083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e4172b4c06664c728e1bd0ebe85041f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3acfb99f-d0", "ovs_interfaceid": "3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 898.143608] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 
tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:0a:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.151882] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Creating folder: Project (e4172b4c06664c728e1bd0ebe85041f8). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 898.152500] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-472e2d10-524c-426a-82db-25a65e4865c1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.164702] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Created folder: Project (e4172b4c06664c728e1bd0ebe85041f8) in parent group-v843372. [ 898.164903] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Creating folder: Instances. Parent ref: group-v843423. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 898.165163] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86e891e3-fdb4-45e5-9435-0fe50a01e81b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.175260] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Created folder: Instances in parent group-v843423. [ 898.175353] env[61440]: DEBUG oslo.service.loopingcall [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.175521] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 898.175711] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43a4cd60-bee5-4ae2-b242-4a3fcfad12a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.201276] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.201276] env[61440]: value = "task-4281277" [ 898.201276] env[61440]: _type = "Task" [ 898.201276] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.213170] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281277, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.713937] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281277, 'name': CreateVM_Task, 'duration_secs': 0.280833} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.714145] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 898.714863] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.715043] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.715362] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.715615] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8095409-0924-4ee0-b8a3-59e3ddaf8499 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.720372] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Waiting for the task: (returnval){ [ 898.720372] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e91bc1-1616-34ea-da89-2afbf3f0de21" [ 898.720372] env[61440]: _type = "Task" [ 898.720372] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.732257] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52e91bc1-1616-34ea-da89-2afbf3f0de21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.235131] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.235810] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.235810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.163701] env[61440]: DEBUG nova.compute.manager [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Received event network-changed-3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 900.163882] env[61440]: DEBUG nova.compute.manager [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Refreshing instance network info cache due to event network-changed-3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 900.164106] env[61440]: DEBUG oslo_concurrency.lockutils [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] Acquiring lock "refresh_cache-b8a27ad2-4cc5-4219-9bc3-5735433b153c" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.164269] env[61440]: DEBUG oslo_concurrency.lockutils [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] Acquired lock "refresh_cache-b8a27ad2-4cc5-4219-9bc3-5735433b153c" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.164440] env[61440]: DEBUG nova.network.neutron [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Refreshing network info cache for port 3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 900.295730] env[61440]: DEBUG oslo_concurrency.lockutils [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.926600] env[61440]: DEBUG nova.network.neutron [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Updated VIF entry in instance network info cache for port 3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 900.926967] env[61440]: DEBUG nova.network.neutron [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Updating instance_info_cache with network_info: [{"id": "3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf", "address": "fa:16:3e:42:0a:3e", "network": {"id": "f164a53f-649f-4fb2-80dc-88ebeb34f5a1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2131785083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e4172b4c06664c728e1bd0ebe85041f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3acfb99f-d0", "ovs_interfaceid": "3acfb99f-d0ca-474d-ba91-7fa0d0dfb2bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.940505] env[61440]: DEBUG oslo_concurrency.lockutils [req-5b1762a9-9a52-435e-aa9c-2433caca0a38 req-a73f89fb-24a8-4fc8-817c-0cfa80dd8a52 service nova] Releasing lock "refresh_cache-b8a27ad2-4cc5-4219-9bc3-5735433b153c" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.281182] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.281182] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.979882] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "faf90964-1814-459f-89ef-0a27808077c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.980120] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock 
"faf90964-1814-459f-89ef-0a27808077c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.517917] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f8f68622-fb90-45e4-99ab-d90adb4c66c4 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "177602d8-99ae-40df-ac3b-63374dde5715" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.518257] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f8f68622-fb90-45e4-99ab-d90adb4c66c4 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "177602d8-99ae-40df-ac3b-63374dde5715" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.973651] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.274586] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.274771] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 917.275531] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.275834] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 917.275834] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 917.297277] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.297448] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.297590] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.297720] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.297843] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.297966] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.298190] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.298338] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.298461] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.298580] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 917.298722] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 917.299214] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.299386] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.273681] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.273913] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.269571] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.274649] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.287027] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.287212] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.288028] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.288028] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 920.291037] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd1d84c-1e59-4575-8e43-c17e716b7f49 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.299890] env[61440]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375f33a2-0ea3-433c-81b3-4bed0f61a07d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.313658] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35ded84-84d7-4c57-9cc8-6f71478de8ad {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.319967] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2770aed-abf3-43d0-9c03-76be1128dbf6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.348100] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180688MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 920.348261] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.348453] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.426175] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.426347] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.426475] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.426616] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.426773] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.426895] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.427354] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.427354] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.427354] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.427354] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 920.438930] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.449881] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.464137] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f455e5bd-301f-4b08-8d41-41c969ace4f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.474723] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4358437a-d336-44d2-b069-60b4992adc77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.484357] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20acf443-e7c3-43c8-8203-23a257532c13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.497198] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e3447d16-79a6-4b5a-bdc0-f148276b48f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.506321] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.517436] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f0ad8a9b-780b-4714-8a33-d92b922cb143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.527622] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1a610510-68a0-45ca-aeae-c07b28f14e8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.539015] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0fd3612a-5309-403b-b853-599e731667cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.548559] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 108d2f3d-2f7e-4757-a78f-77ccc82d831a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.558969] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a757fd04-c309-4b79-ab13-47b70b97b79c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.568575] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e4a748bc-83c2-451a-bed4-f3534a649731 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.578509] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d49a7f3b-ead2-4933-9d7f-3a80ce34e306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.588292] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance cb538de5-4247-4096-93c3-039ea0081985 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.598237] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9866ff2f-53c3-4ed2-865f-d418b0541025 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.607643] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.617031] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.627142] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 177602d8-99ae-40df-ac3b-63374dde5715 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.627381] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 920.627529] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 920.946190] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed8d2bd-1dce-4a35-a4cd-b3f7052fb832 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.954073] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2389325-0d8c-4c3a-ae92-692dd0920f5b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.982946] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d778ef7-03f5-4b34-9b9c-0068ed2b4f57 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.990123] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d6aa8b-6afc-414a-9779-37c24c018f2f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.003198] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 
9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.011945] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.026077] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 921.026274] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.678s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.629759] env[61440]: WARNING oslo_vmware.rw_handles [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 938.629759] env[61440]: ERROR oslo_vmware.rw_handles [ 938.630425] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 938.632820] env[61440]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 938.633110] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Copying Virtual Disk [datastore2] vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/931406b5-25d7-4bb0-a650-8f7acd9ea4e2/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 938.633553] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75d20eec-218b-4e8e-a540-fa821901d01b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.642305] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Waiting for the task: (returnval){ [ 938.642305] env[61440]: value = "task-4281278" [ 938.642305] env[61440]: _type = "Task" [ 938.642305] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.650121] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Task: {'id': task-4281278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.152558] env[61440]: DEBUG oslo_vmware.exceptions [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 939.152879] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.153442] env[61440]: ERROR nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 939.153442] env[61440]: Faults: ['InvalidArgument'] [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Traceback (most recent call last): [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] yield resources [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self.driver.spawn(context, instance, image_meta, [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self._fetch_image_if_missing(context, vi) [ 939.153442] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] image_cache(vi, tmp_image_ds_loc) [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] vm_util.copy_virtual_disk( [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] session._wait_for_task(vmdk_copy_task) [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] return self.wait_for_task(task_ref) [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] return evt.wait() [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] result = hub.switch() [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 939.153843] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] return self.greenlet.switch() [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self.f(*self.args, **self.kw) [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] raise exceptions.translate_fault(task_info.error) [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Faults: ['InvalidArgument'] [ 939.154197] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] [ 939.154197] env[61440]: INFO nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Terminating instance [ 939.155375] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.155582] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.155811] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c93a460e-7ff3-42e1-bf98-64869edfd0bf 
{{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.157854] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 939.158062] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 939.158753] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3751dc58-f709-4f15-9f23-85b59363925c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.165550] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 939.165825] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2295b2b1-0b08-48a9-9a31-f51b0f34dbce {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.167881] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.168071] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 939.169026] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3d4e1d1-80f8-409f-8c6d-0995c36a9b4a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.173406] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 939.173406] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]529ac545-aae4-6931-6f32-1ca7a3fcf22b" [ 939.173406] env[61440]: _type = "Task" [ 939.173406] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.180515] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]529ac545-aae4-6931-6f32-1ca7a3fcf22b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.235836] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 939.236079] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 939.236272] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Deleting the datastore file [datastore2] f21a02ec-4fa2-439c-aa56-570e175a8b5e {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.236608] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04543518-c185-4be9-895a-2f0ea204f214 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.244447] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Waiting for the task: (returnval){ [ 939.244447] env[61440]: value = "task-4281280" [ 939.244447] env[61440]: _type = "Task" [ 939.244447] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.252752] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Task: {'id': task-4281280, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.685893] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 939.686204] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating directory with path [datastore2] vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.686449] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20fa4a9a-c13e-4920-aca8-84f975e11073 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.697814] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Created directory with path [datastore2] vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.698016] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Fetch image to [datastore2] vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 939.698196] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 939.698918] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b5a7c9-2d0f-4bdd-918c-5be77b3b801a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.705667] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d00c51-76ad-4aa0-969d-022380dc0e20 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.714868] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131a5713-00ea-4f19-beeb-75afc12658bb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.749700] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3af5bf3d-7631-4876-9fec-ee0e68aaf211 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.756820] env[61440]: DEBUG oslo_vmware.api [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Task: {'id': task-4281280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081143} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.758344] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.758541] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 939.758825] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 939.759042] env[61440]: INFO nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Took 0.60 seconds to destroy the instance on the hypervisor. 
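The task-4281278 and task-4281280 entries above follow oslo.vmware's task-polling pattern: invoke_api() starts a vCenter-side task and returns only a task reference, then wait_for_task() polls it, producing the "Waiting for the task: (returnval){...} to complete.", "progress is 0%." and "completed successfully" lines, until the task either succeeds or ends in error, at which point _poll_task raises a translated fault (the InvalidArgument seen for CopyVirtualDisk_Task). A minimal sketch of that pattern follows; it is illustrative only, not Nova's actual code, and the vCenter endpoint, credentials, and datacenter handling are placeholders:

# Illustrative sketch (not Nova's code) of the oslo.vmware task-polling
# pattern behind the log entries above. Endpoint and credentials are
# placeholders; the datastore path is the one deleted in task-4281280.
from oslo_vmware import api

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Kick off the vCenter-side task; invoke_api() returns just the task ref.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] f21a02ec-4fa2-439c-aa56-570e175a8b5e',
    datacenter=None)  # a real caller passes the datacenter managed object

# wait_for_task() re-reads task.info every task_poll_interval seconds,
# logging the "Task: {...} progress is N%." lines, and returns the task
# info on success. On failure it raises
# exceptions.translate_fault(task_info.error), which is exactly where the
# VimFaultException with Faults: ['InvalidArgument'] above surfaced.
task_info = session.wait_for_task(task)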
[ 939.761406] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f44bd25e-a751-4886-b9a6-9f9df643588a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.763354] env[61440]: DEBUG nova.compute.claims [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 939.763529] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.763739] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.787024] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 939.840508] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 939.905625] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 939.905829] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 940.208272] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcca2b2-a793-4dd7-aaca-a447be5d3cbf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.216398] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da06d24b-beb4-4752-bc19-76f73fba5293 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.245318] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f02d58-bed5-4ce7-8b28-bf73f30ef131 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.252301] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebf1538-dd8b-4e66-8179-2b347e08c3c3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.265091] env[61440]: DEBUG nova.compute.provider_tree [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.274308] env[61440]: DEBUG nova.scheduler.client.report [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.287655] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.524s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.288188] env[61440]: ERROR nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 940.288188] env[61440]: Faults: ['InvalidArgument'] [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Traceback (most recent call last): [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self.driver.spawn(context, instance, image_meta, [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self._fetch_image_if_missing(context, vi) [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] image_cache(vi, tmp_image_ds_loc) [ 940.288188] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] vm_util.copy_virtual_disk( [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] session._wait_for_task(vmdk_copy_task) [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] return self.wait_for_task(task_ref) [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] return evt.wait() [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] result = hub.switch() [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] return self.greenlet.switch() [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 940.288504] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] self.f(*self.args, **self.kw) [ 940.288839] env[61440]: ERROR nova.compute.manager [instance: 
f21a02ec-4fa2-439c-aa56-570e175a8b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 940.288839] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] raise exceptions.translate_fault(task_info.error) [ 940.288839] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 940.288839] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Faults: ['InvalidArgument'] [ 940.288839] env[61440]: ERROR nova.compute.manager [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] [ 940.288968] env[61440]: DEBUG nova.compute.utils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 940.290300] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Build of instance f21a02ec-4fa2-439c-aa56-570e175a8b5e was re-scheduled: A specified parameter was not correct: fileType [ 940.290300] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 940.290683] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 940.290857] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
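A minimal sketch of the failure path above, assuming `session` is the oslo_vmware.api.VMwareAPISession and `copy_task` the CopyVirtualDisk_Task reference (both names are stand-ins, not values from this log): wait_for_task() polls the task and, when the task ends in an error state, raises the translated fault that the compute manager logs and then uses to re-schedule the build.

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, copy_task):
        try:
            # Polls task_info until the task finishes; a task that ends in an
            # error state is raised as a translated oslo.vmware exception.
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as exc:
            # exc.fault_list carries the raw VIM fault names, here
            # ['InvalidArgument']; nova lets it propagate so
            # _do_build_and_run_instance can re-schedule the instance.
            raise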
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 940.291023] env[61440]: DEBUG nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 940.291192] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 940.641120] env[61440]: DEBUG nova.network.neutron [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.653187] env[61440]: INFO nova.compute.manager [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Took 0.36 seconds to deallocate network for instance. [ 940.766726] env[61440]: INFO nova.scheduler.client.report [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Deleted allocations for instance f21a02ec-4fa2-439c-aa56-570e175a8b5e [ 940.789751] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3946f9c1-aa0b-4d4a-9644-2da37935b75c tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 390.352s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.789751] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 190.786s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.789751] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Acquiring lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.789961] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock
"f21a02ec-4fa2-439c-aa56-570e175a8b5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.789961] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.792765] env[61440]: INFO nova.compute.manager [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Terminating instance [ 940.794878] env[61440]: DEBUG nova.compute.manager [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 940.795208] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 940.796431] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee0d9aa5-8ba8-42dd-ab85-84d1926b236e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.802073] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 940.812014] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da49650d-8a02-4892-9504-fcb84232c582 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.838530] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f21a02ec-4fa2-439c-aa56-570e175a8b5e could not be found. 
[ 940.838885] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 940.839177] env[61440]: INFO nova.compute.manager [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 940.839535] env[61440]: DEBUG oslo.service.loopingcall [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.840497] env[61440]: DEBUG nova.compute.manager [-] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 940.840749] env[61440]: DEBUG nova.network.neutron [-] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 940.857984] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.858102] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.860041] env[61440]: INFO nova.compute.claims [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.875272] env[61440]: DEBUG nova.network.neutron [-] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.893416] env[61440]: INFO nova.compute.manager [-] [instance: f21a02ec-4fa2-439c-aa56-570e175a8b5e] Took 0.05 seconds to deallocate network for instance.
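The 'Waiting for function ... _deallocate_network_with_retries to return' record above is oslo.service's looping-call wait: the wrapped function runs once per tick and stops the loop by raising LoopingCallDone. A generic sketch of that pattern (the function body and interval are assumptions):

    from oslo_service import loopingcall

    def _deallocate_network_with_retries():
        # one deallocation attempt per tick; on success, hand a value
        # back to the waiter and stop the loop
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    result = timer.start(interval=1.0).wait()  # blocks until LoopingCallDone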
[ 940.993412] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a55efe26-4038-45cd-82c2-1529b0b9a09f tempest-ServerDiagnosticsNegativeTest-1475268640 tempest-ServerDiagnosticsNegativeTest-1475268640-project-member] Lock "f21a02ec-4fa2-439c-aa56-570e175a8b5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.204s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.243037] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad08507b-2f63-4717-a569-a6443e14ffed {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.251832] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2b2ada-6696-4f88-b86c-80b8c927223e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.281772] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e7ad2e-1071-4fd8-8367-692a66199464 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.288527] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b1f899-206f-4e45-b948-b163127be24c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.301052] env[61440]: DEBUG nova.compute.provider_tree [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.309063] env[61440]: DEBUG nova.scheduler.client.report [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.322800] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.465s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.323274] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Start building networks asynchronously for instance.
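The inventory dict reported above is what placement schedules against: usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Checking the logged numbers:

    # Capacity arithmetic for the inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 180},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g} (max per allocation {inv['max_unit']})")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=329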
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 941.370029] env[61440]: DEBUG nova.compute.utils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 941.371121] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 941.371346] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 941.385048] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 941.436177] env[61440]: DEBUG nova.policy [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78cf1d8b3e11413ea7eda9db3b850cee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be044e90b0144f31afb4c886aff2dd42', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 941.457630] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Start spawning the instance on the hypervisor. 
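The 'Policy check for network:attach_external_network failed' record above is nova/policy.py returning False for a member-role token; underneath it is an oslo.policy enforcement call. A minimal sketch, with the empty target as an illustration (nova's actual call site passes more context):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    creds = {'project_id': 'be044e90b0144f31afb4c886aff2dd42',
             'roles': ['reader', 'member'], 'is_admin': False}
    # With do_raise=False the check simply returns False, which nova logs as
    # the policy-check failure above and then proceeds without the capability.
    allowed = enforcer.enforce('network:attach_external_network',
                               target={}, creds=creds, do_raise=False)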
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 941.490532] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.490994] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.491327] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.491637] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.492593] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.492593] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.492593] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.492593] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.492749] env[61440]: DEBUG 
nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.492775] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.492937] env[61440]: DEBUG nova.virt.hardware [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.494082] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f69c0f-731b-464e-9d1f-16c17de12f6f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.502173] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d53e534-c588-4ca3-bc15-7dcdd6fdc2c0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.098379] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Successfully created port: 7dce5d10-01a1-4acb-96eb-b165b19a330f {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 942.903846] env[61440]: DEBUG nova.compute.manager [req-a620b3f7-9c8d-40ba-810b-fefb99c9dd89 req-70a323b8-0f72-47b9-8d6a-a322519025c6 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Received event network-vif-plugged-7dce5d10-01a1-4acb-96eb-b165b19a330f {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 942.904114] env[61440]: DEBUG oslo_concurrency.lockutils [req-a620b3f7-9c8d-40ba-810b-fefb99c9dd89 req-70a323b8-0f72-47b9-8d6a-a322519025c6 service nova] Acquiring lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.904502] env[61440]: DEBUG oslo_concurrency.lockutils [req-a620b3f7-9c8d-40ba-810b-fefb99c9dd89 req-70a323b8-0f72-47b9-8d6a-a322519025c6 service nova] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.905090] env[61440]: DEBUG oslo_concurrency.lockutils [req-a620b3f7-9c8d-40ba-810b-fefb99c9dd89 req-70a323b8-0f72-47b9-8d6a-a322519025c6 service nova] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.905288] env[61440]: DEBUG
nova.compute.manager [req-a620b3f7-9c8d-40ba-810b-fefb99c9dd89 req-70a323b8-0f72-47b9-8d6a-a322519025c6 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] No waiting events found dispatching network-vif-plugged-7dce5d10-01a1-4acb-96eb-b165b19a330f {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 942.905464] env[61440]: WARNING nova.compute.manager [req-a620b3f7-9c8d-40ba-810b-fefb99c9dd89 req-70a323b8-0f72-47b9-8d6a-a322519025c6 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Received unexpected event network-vif-plugged-7dce5d10-01a1-4acb-96eb-b165b19a330f for instance with vm_state building and task_state spawning. [ 943.006795] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Successfully updated port: 7dce5d10-01a1-4acb-96eb-b165b19a330f {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 943.025013] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "refresh_cache-07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.025241] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquired lock "refresh_cache-07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.025462] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 943.104058] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance cache missing network info. 
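Stepping back to the nova.virt.hardware records at 941.49x: with no flavor or image topology constraints the limits default to 65536, and the walk reduces to enumerating sockets/cores/threads factorizations of the vCPU count, which for 1 vCPU yields only 1:1:1. A toy reduction of that enumeration (illustrative, not nova's actual implementation):

    def possible_topologies(vcpus, limit=65536):
        # enumerate sockets*cores*threads factorizations within the limits
        bound = min(vcpus, limit)
        return [(s, c, t)
                for s in range(1, bound + 1)
                for c in range(1, bound + 1)
                for t in range(1, bound + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"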
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 943.361486] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Updating instance_info_cache with network_info: [{"id": "7dce5d10-01a1-4acb-96eb-b165b19a330f", "address": "fa:16:3e:37:bd:0b", "network": {"id": "35281033-bcc2-411a-8314-ca99e5e31b67", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-657626143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be044e90b0144f31afb4c886aff2dd42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dce5d10-01", "ovs_interfaceid": "7dce5d10-01a1-4acb-96eb-b165b19a330f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.375206] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Releasing lock "refresh_cache-07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.375516] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance network_info: |[{"id": "7dce5d10-01a1-4acb-96eb-b165b19a330f", "address": "fa:16:3e:37:bd:0b", "network": {"id": "35281033-bcc2-411a-8314-ca99e5e31b67", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-657626143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be044e90b0144f31afb4c886aff2dd42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dce5d10-01", "ovs_interfaceid": "7dce5d10-01a1-4acb-96eb-b165b19a330f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 943.376115] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:bd:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ebd8af-aaf6-4d04-b869-3882e2571ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dce5d10-01a1-4acb-96eb-b165b19a330f', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.383461] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Creating folder: Project (be044e90b0144f31afb4c886aff2dd42). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 943.383981] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e25e964-3e2c-4c27-bf37-f79437522c88 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.396614] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Created folder: Project (be044e90b0144f31afb4c886aff2dd42) in parent group-v843372. [ 943.396891] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Creating folder: Instances. Parent ref: group-v843426. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 943.397048] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d5e3c97-ce6d-468f-9122-72d302e08882 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.406844] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Created folder: Instances in parent group-v843426. [ 943.407228] env[61440]: DEBUG oslo.service.loopingcall [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
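The two Folder.CreateFolder invocations above lay out the per-tenant hierarchy: a 'Project (<project_id>)' folder under the OpenStack root (group-v843372 here) and an 'Instances' child (group-v843426). CreateFolder is synchronous in the vSphere API, so unlike the VM creation that follows there is no task to poll. A sketch with `session` and `parent_ref` as assumed stand-ins:

    # parent_ref would be the group-v843372 moref from the log above.
    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_ref,
        name='Project (be044e90b0144f31afb4c886aff2dd42)')
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')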
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.407309] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 943.407531] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0d64b1a-b740-4bf0-bf95-00978abf66e7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.430021] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.430021] env[61440]: value = "task-4281283" [ 943.430021] env[61440]: _type = "Task" [ 943.430021] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.435857] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281283, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.938187] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281283, 'name': CreateVM_Task, 'duration_secs': 0.400534} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.938378] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 943.939039] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.939209] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.939957] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.939957] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-807d179a-e9d8-4927-ae4d-09d7edce99ac {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.944459] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Waiting for the task: (returnval){ [ 943.944459] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a2997a-3408-a9cd-bf78-c00f4d35afed" [ 943.944459] env[61440]: _type = "Task" [ 943.944459] env[61440]: } to complete. 
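task-4281283 above shows the asynchronous half of the same API: Folder.CreateVM_Task returns a task reference that oslo.vmware polls until completion (progress 0% to done in 0.400534s here). The invoke-then-wait idiom, with vm_folder, config_spec and res_pool as assumptions:

    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                              config=config_spec, pool=res_pool)
    task_info = session.wait_for_task(task)  # polls; raises a translated fault on error
    vm_ref = task_info.result                # moref of the newly created VM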
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.951717] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a2997a-3408-a9cd-bf78-c00f4d35afed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.457862] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.458765] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.459010] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.939162] env[61440]: DEBUG nova.compute.manager [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Received event network-changed-7dce5d10-01a1-4acb-96eb-b165b19a330f {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 944.939162] env[61440]: DEBUG nova.compute.manager [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Refreshing instance network info cache due to event network-changed-7dce5d10-01a1-4acb-96eb-b165b19a330f. 
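The req-a7163b56/req-a83d957b records above are external instance events pushed from nova-api when Neutron reports port activity; the compute manager matches each one against waiters registered per (instance, event) key, which is why the earlier network-vif-plugged event at 942.905 logged 'No waiting events found'. A toy illustration of that registry shape (entirely illustrative, not nova's code):

    import threading

    class EventRegistry:
        def __init__(self):
            # {instance_uuid: {'network-vif-plugged-PORTID': threading.Event()}}
            self._waiters = {}

        def prepare(self, uuid, key):
            self._waiters.setdefault(uuid, {})[key] = threading.Event()

        def dispatch(self, uuid, key):
            ev = self._waiters.get(uuid, {}).pop(key, None)
            if ev is None:
                return False  # -> "No waiting events found dispatching ..."
            ev.set()
            return True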
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 944.939162] env[61440]: DEBUG oslo_concurrency.lockutils [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] Acquiring lock "refresh_cache-07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.939162] env[61440]: DEBUG oslo_concurrency.lockutils [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] Acquired lock "refresh_cache-07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.939162] env[61440]: DEBUG nova.network.neutron [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Refreshing network info cache for port 7dce5d10-01a1-4acb-96eb-b165b19a330f {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.520554] env[61440]: DEBUG nova.network.neutron [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Updated VIF entry in instance network info cache for port 7dce5d10-01a1-4acb-96eb-b165b19a330f. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 945.520997] env[61440]: DEBUG nova.network.neutron [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Updating instance_info_cache with network_info: [{"id": "7dce5d10-01a1-4acb-96eb-b165b19a330f", "address": "fa:16:3e:37:bd:0b", "network": {"id": "35281033-bcc2-411a-8314-ca99e5e31b67", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-657626143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be044e90b0144f31afb4c886aff2dd42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dce5d10-01", "ovs_interfaceid": "7dce5d10-01a1-4acb-96eb-b165b19a330f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.532788] env[61440]: DEBUG oslo_concurrency.lockutils [req-a7163b56-fe39-4ba6-ae23-d9d994594493 req-a83d957b-42ce-4aa2-b1ba-1037e93964c2 service nova] Releasing lock "refresh_cache-07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.232632] env[61440]: DEBUG oslo_concurrency.lockutils [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] 
Acquiring lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.235214] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "726c5ed6-d706-4886-a2c1-fc666a527662" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.235547] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "726c5ed6-d706-4886-a2c1-fc666a527662" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.640030] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b0e8daa-5362-4527-b675-f341701fd618 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "01f72fa9-b392-4789-bc71-6339634efc28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.640318] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b0e8daa-5362-4527-b675-f341701fd618 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "01f72fa9-b392-4789-bc71-6339634efc28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.346763] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.679493] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1bd2ce90-e5c2-4a64-9a6b-1070b3330706 tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] Acquiring lock "b5c17233-358d-489d-8897-96cc38427164" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.680147] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1bd2ce90-e5c2-4a64-9a6b-1070b3330706 tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] Lock "b5c17233-358d-489d-8897-96cc38427164" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.106494] 
env[61440]: DEBUG oslo_concurrency.lockutils [None req-f391f9a5-a54e-4b81-8460-69790bd30b1a tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] Acquiring lock "67e6bf8c-34cb-4918-9680-a707ffd09e35" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.106785] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f391f9a5-a54e-4b81-8460-69790bd30b1a tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] Lock "67e6bf8c-34cb-4918-9680-a707ffd09e35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.026630] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 977.026926] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 977.027048] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 978.275289] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.275570] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 978.275603] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 978.300289] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300289] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300289] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building.
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300289] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300289] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300513] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300587] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300715] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300840] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.300960] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 978.301358] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
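The 'Running periodic task ComputeManager._*' lines through this stretch come from oslo.service's periodic task machinery: methods decorated with periodic_task on a PeriodicTasks subclass are driven by the service loop, each run producing one of the debug records above. The general shape, with the spacing value as an assumption:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)  # interval is an assumption
        def _heal_instance_info_cache(self, context):
            # each invocation emits a "Running periodic task ..." debug line
            pass

    # the service loop drives this via manager.run_periodic_tasks(context)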
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 979.274858] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.303347] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.303347] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.274553] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.274860] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.275026] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.288836] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.289068] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.289256] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.289416] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 980.290577] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae550ff-7e91-444b-a3a5-610aad7880dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.303611] env[61440]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f966abdc-4bb9-4f51-8cbb-550bc2ae0cfe {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.317050] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a024c7-fb8b-41be-8f42-e05c932c5223 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.323273] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785a01b3-37e0-464e-8bb0-18fc7d2ca987 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.351462] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180672MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 980.351617] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.351811] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.442788] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.442944] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.443674] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.443846] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.443974] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.444111] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.444232] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.444343] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.444454] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.444563] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 980.460752] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f455e5bd-301f-4b08-8d41-41c969ace4f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.475418] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4358437a-d336-44d2-b069-60b4992adc77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.493536] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20acf443-e7c3-43c8-8203-23a257532c13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.506452] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e3447d16-79a6-4b5a-bdc0-f148276b48f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.519426] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.537175] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f0ad8a9b-780b-4714-8a33-d92b922cb143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.548948] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1a610510-68a0-45ca-aeae-c07b28f14e8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.569951] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0fd3612a-5309-403b-b853-599e731667cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.579692] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 108d2f3d-2f7e-4757-a78f-77ccc82d831a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.589624] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a757fd04-c309-4b79-ab13-47b70b97b79c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.600477] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e4a748bc-83c2-451a-bed4-f3534a649731 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.613109] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d49a7f3b-ead2-4933-9d7f-3a80ce34e306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.630400] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance cb538de5-4247-4096-93c3-039ea0081985 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.644749] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9866ff2f-53c3-4ed2-865f-d418b0541025 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.656579] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.672565] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.683956] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 177602d8-99ae-40df-ac3b-63374dde5715 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.695877] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.706852] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 01f72fa9-b392-4789-bc71-6339634efc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.717455] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b5c17233-358d-489d-8897-96cc38427164 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.730366] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 67e6bf8c-34cb-4918-9680-a707ffd09e35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.730674] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 980.730837] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 981.228025] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e37fa24-21ec-42de-b4b6-21249755915c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.235114] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb3e61d-a46d-4fe5-934b-883f818243d1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.269255] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3923f15-de10-461c-a52c-89e115fcac28 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.281023] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07abc24e-7a77-493c-bb11-72015f08a52b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.293080] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.304050] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.326669] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 981.326865] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.975s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.322742] env[61440]: DEBUG oslo_service.periodic_task [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.350906] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d8812c4a-da42-4ca1-9b8a-afecb9679d9f tempest-ServerActionsV293TestJSON-573375651 tempest-ServerActionsV293TestJSON-573375651-project-member] Acquiring lock "a14259c3-7af5-4d14-866a-48763fe2faaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.351202] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d8812c4a-da42-4ca1-9b8a-afecb9679d9f tempest-ServerActionsV293TestJSON-573375651 tempest-ServerActionsV293TestJSON-573375651-project-member] Lock "a14259c3-7af5-4d14-866a-48763fe2faaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.154513] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fa5411aa-b459-4707-b734-80d679e20f0a tempest-ServerAddressesTestJSON-756750789 tempest-ServerAddressesTestJSON-756750789-project-member] Acquiring lock "51f670be-26a7-4248-a0b7-386968bed988" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.154960] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fa5411aa-b459-4707-b734-80d679e20f0a tempest-ServerAddressesTestJSON-756750789 tempest-ServerAddressesTestJSON-756750789-project-member] Lock "51f670be-26a7-4248-a0b7-386968bed988" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.404236] env[61440]: WARNING oslo_vmware.rw_handles [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end
closed connection without response [ 989.404236] env[61440]: ERROR oslo_vmware.rw_handles [ 989.404236] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 989.404770] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 989.404770] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Copying Virtual Disk [datastore2] vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/cf6802b5-d5c8-4e96-853b-f9a350821b30/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 989.405535] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3870d767-ac4e-412c-bfff-bd730ff92dd5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.416025] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 989.416025] env[61440]: value = "task-4281294" [ 989.416025] env[61440]: _type = "Task" [ 989.416025] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.421945] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': task-4281294, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.929509] env[61440]: DEBUG oslo_vmware.exceptions [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 989.929655] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.930228] env[61440]: ERROR nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.930228] env[61440]: Faults: ['InvalidArgument'] [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Traceback (most recent call last): [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] yield resources [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self.driver.spawn(context, instance, image_meta, [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self._fetch_image_if_missing(context, vi) [ 989.930228] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] image_cache(vi, tmp_image_ds_loc) [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] vm_util.copy_virtual_disk( [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] session._wait_for_task(vmdk_copy_task) [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] return self.wait_for_task(task_ref) [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] return evt.wait() [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] result = hub.switch() [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 989.930572] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] return self.greenlet.switch() [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self.f(*self.args, **self.kw) [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] raise exceptions.translate_fault(task_info.error) [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Faults: ['InvalidArgument'] [ 989.930865] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] [ 989.930865] env[61440]: INFO nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Terminating instance [ 989.933981] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.934220] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.934965] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 
a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 989.935196] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 989.935434] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a38f33b3-bfce-4465-9f2d-8e745165d734 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.938170] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368360a4-5bbb-4d5e-95d3-1e1aa60e7008 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.945534] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 989.945709] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-678bf123-0d7d-49cd-8f85-98f65b8dc2c8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.947857] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.948042] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 989.948989] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e77fc98a-f654-4bef-a91d-7d0bff7de761 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.953599] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 989.953599] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5248fdc8-f788-0954-0cd3-f42d6c1d786f" [ 989.953599] env[61440]: _type = "Task" [ 989.953599] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.961872] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5248fdc8-f788-0954-0cd3-f42d6c1d786f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.027713] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 990.027964] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 990.028158] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Deleting the datastore file [datastore2] a84b19ff-892b-43cb-9fd7-8f8b23f612a6 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.028430] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de575b43-f2f1-4bd0-8b45-ef56238a1574 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.034980] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 990.034980] env[61440]: value = "task-4281296" [ 990.034980] env[61440]: _type = "Task" [ 990.034980] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.043854] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': task-4281296, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.468368] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 990.468766] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating directory with path [datastore2] vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.468924] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de9a154c-02bb-45bf-9370-b3f6a44bb394 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.480338] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Created directory with path [datastore2] vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.480542] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Fetch image to [datastore2] vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 990.480715] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 990.481528] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4d7603-338a-4b00-b971-67ada841d023 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.488520] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb50e9a7-399a-4984-a886-c2d142a34a26 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.497762] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4df4d49-41c4-4e41-82c2-357b57e4e506 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.530196] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aac0f2-cc4a-4d49-a16d-46a0259a2fc5 {{(pid=61440) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.539244] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-57c9b4cb-451a-4ea0-80b2-c7bb25666728 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.545760] env[61440]: DEBUG oslo_vmware.api [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': task-4281296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086483} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.545928] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.546140] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 990.546325] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 990.546514] env[61440]: INFO nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Took 0.61 seconds to destroy the instance on the hypervisor. 
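The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's task-submission pattern: invoke the *_Task method, then block in wait_for_task while _poll_task logs progress until the task succeeds or its fault is translated into an exception (the VimFaultException seen earlier). A minimal sketch of that pattern follows; the vCenter endpoint, credentials, and datastore paths are hypothetical and not taken from this log:

from oslo_vmware import api
from oslo_vmware import vim_util

# Hypothetical endpoint and credentials; api_retry_count and
# task_poll_interval correspond to the retry/poll knobs nova-compute sets.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=1.0)

# Look up a datacenter ref and the VirtualDiskManager, then submit the task.
dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'Datacenter', 1).objects[0].obj
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',  # placeholder
    sourceDatacenter=dc_ref,
    destName='[datastore2] vmware_temp/example/example.vmdk')       # placeholder

# wait_for_task drives the "progress is 0%." polling seen above and raises
# oslo_vmware.exceptions.VimFaultException if the task ends in error.
session.wait_for_task(task)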
[ 990.548757] env[61440]: DEBUG nova.compute.claims [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 990.548913] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.549123] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.560672] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 990.624946] env[61440]: DEBUG oslo_vmware.rw_handles [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 990.696497] env[61440]: DEBUG oslo_vmware.rw_handles [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 990.696733] env[61440]: DEBUG oslo_vmware.rw_handles [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 991.157830] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e593db-9eb9-45e3-b264-fdc1ae082f1d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.167848] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02211185-7417-45dd-b0db-3bdb86920276 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.200431] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd349cb8-7ec0-4cb2-bc7f-ce156fde2de1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.208297] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3667de1e-7f93-4d50-8f7c-be5d7810fea6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.221543] env[61440]: DEBUG nova.compute.provider_tree [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.232071] env[61440]: DEBUG nova.scheduler.client.report [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.258710] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.709s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.259696] env[61440]: ERROR nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.259696] env[61440]: Faults: ['InvalidArgument'] [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Traceback (most recent call last): [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 991.259696] env[61440]: 
ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self.driver.spawn(context, instance, image_meta, [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self._fetch_image_if_missing(context, vi) [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] image_cache(vi, tmp_image_ds_loc) [ 991.259696] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] vm_util.copy_virtual_disk( [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] session._wait_for_task(vmdk_copy_task) [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] return self.wait_for_task(task_ref) [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] return evt.wait() [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] result = hub.switch() [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] return self.greenlet.switch() [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 991.260284] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] self.f(*self.args, **self.kw) [ 991.260653] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 991.260653] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] raise exceptions.translate_fault(task_info.error) [ 991.260653] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.260653] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Faults: ['InvalidArgument'] [ 991.260653] env[61440]: ERROR nova.compute.manager [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] [ 991.261860] env[61440]: DEBUG nova.compute.utils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 991.263771] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Build of instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 was re-scheduled: A specified parameter was not correct: fileType [ 991.263771] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 991.263989] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 991.264513] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 991.264513] env[61440]: DEBUG nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 991.264513] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 992.031620] env[61440]: DEBUG nova.network.neutron [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.043987] env[61440]: INFO nova.compute.manager [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Took 0.78 seconds to deallocate network for instance. [ 992.180023] env[61440]: INFO nova.scheduler.client.report [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Deleted allocations for instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 [ 992.202897] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a4a9cbaa-5483-4311-a15b-8485a7a2b2f6 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 438.791s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.204427] env[61440]: DEBUG oslo_concurrency.lockutils [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 240.305s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.204427] env[61440]: DEBUG oslo_concurrency.lockutils [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.204588] env[61440]: DEBUG oslo_concurrency.lockutils [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.204730] env[61440]: DEBUG oslo_concurrency.lockutils [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.207036] env[61440]: INFO nova.compute.manager [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Terminating instance [ 992.209105] env[61440]: DEBUG nova.compute.manager [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 992.209310] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 992.209802] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a2a2992-d56f-478d-b235-2d4b5c02567b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.218621] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e5f395-70e5-4d5b-a529-85c67d5dd1ea {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.230849] env[61440]: DEBUG nova.compute.manager [None req-c09f1745-8c4f-429f-92f8-3dbfa6e50551 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] [instance: 09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 992.254838] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a84b19ff-892b-43cb-9fd7-8f8b23f612a6 could not be found. 
[ 992.255396] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 992.255609] env[61440]: INFO nova.compute.manager [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 992.255866] env[61440]: DEBUG oslo.service.loopingcall [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 992.256154] env[61440]: DEBUG nova.compute.manager [-] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 992.256291] env[61440]: DEBUG nova.network.neutron [-] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 992.261142] env[61440]: DEBUG nova.compute.manager [None req-c09f1745-8c4f-429f-92f8-3dbfa6e50551 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] [instance: 09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 992.288762] env[61440]: DEBUG nova.network.neutron [-] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 992.290880] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c09f1745-8c4f-429f-92f8-3dbfa6e50551 tempest-VolumesAdminNegativeTest-1471883772 tempest-VolumesAdminNegativeTest-1471883772-project-member] Lock "09c9aa1a-7fc0-4e6c-ad6a-55eb79fda535" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.517s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 992.298402] env[61440]: INFO nova.compute.manager [-] [instance: a84b19ff-892b-43cb-9fd7-8f8b23f612a6] Took 0.04 seconds to deallocate network for instance.
[ 992.309905] env[61440]: DEBUG nova.compute.manager [None req-48d6d9f5-4ce6-4121-97b9-3c879a65e191 tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] [instance: f455e5bd-301f-4b08-8d41-41c969ace4f6] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 992.335436] env[61440]: DEBUG nova.compute.manager [None req-48d6d9f5-4ce6-4121-97b9-3c879a65e191 tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] [instance: f455e5bd-301f-4b08-8d41-41c969ace4f6] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 992.366985] env[61440]: DEBUG oslo_concurrency.lockutils [None req-48d6d9f5-4ce6-4121-97b9-3c879a65e191 tempest-AttachVolumeTestJSON-366293248 tempest-AttachVolumeTestJSON-366293248-project-member] Lock "f455e5bd-301f-4b08-8d41-41c969ace4f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.970s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 992.380803] env[61440]: DEBUG nova.compute.manager [None req-b57d2d32-329b-4cd2-b56b-3f4ca79e7e7f tempest-ServerDiagnosticsV248Test-1386053675 tempest-ServerDiagnosticsV248Test-1386053675-project-member] [instance: 4358437a-d336-44d2-b069-60b4992adc77] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 992.419561] env[61440]: DEBUG nova.compute.manager [None req-b57d2d32-329b-4cd2-b56b-3f4ca79e7e7f tempest-ServerDiagnosticsV248Test-1386053675 tempest-ServerDiagnosticsV248Test-1386053675-project-member] [instance: 4358437a-d336-44d2-b069-60b4992adc77] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 992.441527] env[61440]: DEBUG oslo_concurrency.lockutils [None req-36c8c175-a8e9-459c-921a-bd994a0b81b4 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "a84b19ff-892b-43cb-9fd7-8f8b23f612a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.237s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 992.448564] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b57d2d32-329b-4cd2-b56b-3f4ca79e7e7f tempest-ServerDiagnosticsV248Test-1386053675 tempest-ServerDiagnosticsV248Test-1386053675-project-member] Lock "4358437a-d336-44d2-b069-60b4992adc77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.631s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 992.463934] env[61440]: DEBUG nova.compute.manager [None req-7df9bef3-d9f8-4b19-bd2d-5a77e5f15b44 tempest-InstanceActionsTestJSON-57248931 tempest-InstanceActionsTestJSON-57248931-project-member] [instance: 20acf443-e7c3-43c8-8203-23a257532c13] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 992.513327] env[61440]: DEBUG nova.compute.manager [None req-7df9bef3-d9f8-4b19-bd2d-5a77e5f15b44 tempest-InstanceActionsTestJSON-57248931 tempest-InstanceActionsTestJSON-57248931-project-member] [instance: 20acf443-e7c3-43c8-8203-23a257532c13] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 992.546891] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7df9bef3-d9f8-4b19-bd2d-5a77e5f15b44 tempest-InstanceActionsTestJSON-57248931 tempest-InstanceActionsTestJSON-57248931-project-member] Lock "20acf443-e7c3-43c8-8203-23a257532c13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.105s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 992.558138] env[61440]: DEBUG nova.compute.manager [None req-8ccdcc32-f532-419d-b1ec-f10627704f0d tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: e3447d16-79a6-4b5a-bdc0-f148276b48f8] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 992.584804] env[61440]: DEBUG nova.compute.manager [None req-8ccdcc32-f532-419d-b1ec-f10627704f0d tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] [instance: e3447d16-79a6-4b5a-bdc0-f148276b48f8] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 992.607620] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8ccdcc32-f532-419d-b1ec-f10627704f0d tempest-MigrationsAdminTest-1548070187 tempest-MigrationsAdminTest-1548070187-project-member] Lock "e3447d16-79a6-4b5a-bdc0-f148276b48f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.934s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 992.622998] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 992.702175] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 992.702513] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 992.704071] env[61440]: INFO nova.compute.claims [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 993.289755] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d48f800-74d9-464e-9d88-871c0fcb3fbe {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.302209] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee88d8a-795a-405b-983a-b15d32ed1d0b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.338126] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359bf4c6-457a-4129-9797-c20d965e6c9e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.347271] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731d1239-06d8-4a45-9865-f67283b3645e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.359561] env[61440]: DEBUG nova.compute.provider_tree [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 993.370863] env[61440]: DEBUG nova.scheduler.client.report [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 993.389628] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.687s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 993.390138] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 993.430688] env[61440]: DEBUG nova.compute.utils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 993.432043] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 993.432286] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 993.443150] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 993.495193] env[61440]: DEBUG nova.policy [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '719d773060694d48aacfb9fe21f9c8ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebf7143ce68b47bfb93e66b2aa5cc890', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}}
[ 993.552942] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 993.584394] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=<?>,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T01:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 993.584734] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 993.584935] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 993.585404] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 993.585631] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 993.585787] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 993.586623] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 993.586623] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 993.586623] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 993.587285] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 993.588070] env[61440]: DEBUG nova.virt.hardware [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 993.588769] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900edf69-3165-4f24-9b74-42789a9b2fcf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.598935] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc870ee-74a5-46bb-bb85-af0e0a030ef7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.947824] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Successfully created port: a46ba805-517a-4584-8469-49171e55baaa {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 995.122643] env[61440]: DEBUG nova.compute.manager [req-1b5b3d54-4bdd-41f9-a777-2bb6927146fe req-b3e85d0e-c329-4986-93a5-5140a602b598 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Received event network-vif-plugged-a46ba805-517a-4584-8469-49171e55baaa {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 995.122906] env[61440]: DEBUG oslo_concurrency.lockutils [req-1b5b3d54-4bdd-41f9-a777-2bb6927146fe req-b3e85d0e-c329-4986-93a5-5140a602b598 service nova] Acquiring lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 995.123456] env[61440]: DEBUG oslo_concurrency.lockutils [req-1b5b3d54-4bdd-41f9-a777-2bb6927146fe req-b3e85d0e-c329-4986-93a5-5140a602b598 service nova] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 995.125725] env[61440]: DEBUG oslo_concurrency.lockutils [req-1b5b3d54-4bdd-41f9-a777-2bb6927146fe req-b3e85d0e-c329-4986-93a5-5140a602b598 service nova] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 995.125725] env[61440]: DEBUG nova.compute.manager [req-1b5b3d54-4bdd-41f9-a777-2bb6927146fe req-b3e85d0e-c329-4986-93a5-5140a602b598 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] No waiting events found dispatching network-vif-plugged-a46ba805-517a-4584-8469-49171e55baaa {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 995.125725] env[61440]: WARNING nova.compute.manager [req-1b5b3d54-4bdd-41f9-a777-2bb6927146fe req-b3e85d0e-c329-4986-93a5-5140a602b598 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Received unexpected event network-vif-plugged-a46ba805-517a-4584-8469-49171e55baaa for instance with vm_state building and task_state spawning.
[ 995.321332] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Successfully updated port: a46ba805-517a-4584-8469-49171e55baaa {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 995.339889] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "refresh_cache-6765defd-cd4d-49e2-a734-7b3cccca8bbd" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 995.340042] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "refresh_cache-6765defd-cd4d-49e2-a734-7b3cccca8bbd" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 995.340214] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 995.418138] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 995.808077] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Updating instance_info_cache with network_info: [{"id": "a46ba805-517a-4584-8469-49171e55baaa", "address": "fa:16:3e:17:a4:c7", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa46ba805-51", "ovs_interfaceid": "a46ba805-517a-4584-8469-49171e55baaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 995.839385] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "refresh_cache-6765defd-cd4d-49e2-a734-7b3cccca8bbd" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
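The network_info blobs logged above are plain JSON-serializable structures, so they can be read back directly when debugging a cache entry. A short sketch pulling the port UUID, MAC, and fixed IP out of such a blob (field names taken from the log itself; the blob here is abbreviated):

    import json

    # Abbreviated copy of the network_info entry logged above.
    network_info = json.loads('''[{"id": "a46ba805-517a-4584-8469-49171e55baaa",
        "address": "fa:16:3e:17:a4:c7",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3", "type": "fixed"}]}]}}]''')

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        # Prints: port UUID, MAC address, ['192.168.128.3']
        print(vif["id"], vif["address"], fixed)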
[ 995.840043] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance network_info: |[{"id": "a46ba805-517a-4584-8469-49171e55baaa", "address": "fa:16:3e:17:a4:c7", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa46ba805-51", "ovs_interfaceid": "a46ba805-517a-4584-8469-49171e55baaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 995.840224] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:a4:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a46ba805-517a-4584-8469-49171e55baaa', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 995.850740] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating folder: Project (ebf7143ce68b47bfb93e66b2aa5cc890). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 995.852174] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b5b5dcd-2bf2-41fa-91c7-e901c4fefe7e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 995.865519] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created folder: Project (ebf7143ce68b47bfb93e66b2aa5cc890) in parent group-v843372.
[ 995.865928] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating folder: Instances. Parent ref: group-v843433. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 995.866582] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c49f68e3-8cb4-469e-b789-d33dae8ee54d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 995.877113] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created folder: Instances in parent group-v843433.
[ 995.877800] env[61440]: DEBUG oslo.service.loopingcall [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 995.877800] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 995.877800] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12505173-4b41-4c93-b289-da677cf77337 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 995.908152] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 995.908152] env[61440]: value = "task-4281299"
[ 995.908152] env[61440]: _type = "Task"
[ 995.908152] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 995.916530] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281299, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 996.421560] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281299, 'name': CreateVM_Task, 'duration_secs': 0.29802} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 996.421560] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 996.423729] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 996.425640] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 996.426044] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 996.426380] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-835774c9-0220-4118-a49a-9308000e82fd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
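The CreateVM_Task sequence above follows oslo.vmware's wait_for_task pattern: submit a vCenter task, then poll its state until it reaches a terminal success or error. A simplified polling loop in the same spirit (the function names here are illustrative; the real implementation lives in oslo_vmware/api.py and uses oslo.service looping calls):

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until vCenter reports a terminal state; the log above shows a
        # sub-second cadence between "progress is 0%" and completion.
        while True:
            info = get_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # In oslo.vmware this is where the fault gets translated
                # into a typed exception such as VimFaultException.
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(interval)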
[ 996.432694] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){
[ 996.432694] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524d57e0-fa8c-4c11-fbc6-fd57b1aede7a"
[ 996.432694] env[61440]: _type = "Task"
[ 996.432694] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 996.441195] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524d57e0-fa8c-4c11-fbc6-fd57b1aede7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 996.947708] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 996.947972] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 996.948241] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 997.354359] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "fd9b654a-0651-46ae-a7c9-30743b875e2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 997.354613] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 997.414436] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "d5e3b4fc-b970-4162-a8af-e40ed91f4575" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.424025] env[61440]: DEBUG nova.compute.manager [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Received event network-changed-a46ba805-517a-4584-8469-49171e55baaa {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 997.424025] env[61440]: DEBUG nova.compute.manager [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Refreshing instance network info cache due to event network-changed-a46ba805-517a-4584-8469-49171e55baaa. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 997.424025] env[61440]: DEBUG oslo_concurrency.lockutils [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] Acquiring lock "refresh_cache-6765defd-cd4d-49e2-a734-7b3cccca8bbd" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.424025] env[61440]: DEBUG oslo_concurrency.lockutils [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] Acquired lock "refresh_cache-6765defd-cd4d-49e2-a734-7b3cccca8bbd" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.424025] env[61440]: DEBUG nova.network.neutron [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Refreshing network info cache for port a46ba805-517a-4584-8469-49171e55baaa {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 997.455944] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "b3054adf-d5b8-4c79-8ae3-ffb4deb745b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.456307] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "b3054adf-d5b8-4c79-8ae3-ffb4deb745b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.848733] env[61440]: DEBUG nova.network.neutron [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Updated VIF entry in instance network info cache for port a46ba805-517a-4584-8469-49171e55baaa. 
[ 997.848733] env[61440]: DEBUG nova.network.neutron [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Updated VIF entry in instance network info cache for port a46ba805-517a-4584-8469-49171e55baaa. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 997.848733] env[61440]: DEBUG nova.network.neutron [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Updating instance_info_cache with network_info: [{"id": "a46ba805-517a-4584-8469-49171e55baaa", "address": "fa:16:3e:17:a4:c7", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa46ba805-51", "ovs_interfaceid": "a46ba805-517a-4584-8469-49171e55baaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 997.868746] env[61440]: DEBUG oslo_concurrency.lockutils [req-666f1eee-de78-4022-a5d5-a2f2a9f7368e req-270032cf-0e7a-48d9-9c29-392609be6421 service nova] Releasing lock "refresh_cache-6765defd-cd4d-49e2-a734-7b3cccca8bbd" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1001.364555] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1001.403394] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0565004a-e8b7-4419-9ab6-908c95073f77 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] Acquiring lock "5d96681c-395d-4a15-a699-cf30a4d69827" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1001.403636] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0565004a-e8b7-4419-9ab6-908c95073f77 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] Lock "5d96681c-395d-4a15-a699-cf30a4d69827" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
Acquiring lock "08e64faa-b841-4ee2-9fe7-a74a5b100b99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.908350] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "08e64faa-b841-4ee2-9fe7-a74a5b100b99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.934368] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Acquiring lock "039393b3-b017-41b4-ab38-7675d72101d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.934368] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "039393b3-b017-41b4-ab38-7675d72101d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.393553] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9d2f23c-4085-41d1-92c9-2a49665af44b tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "20fcd3fa-cc08-4b89-af9d-5a6241864946" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.393553] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9d2f23c-4085-41d1-92c9-2a49665af44b tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "20fcd3fa-cc08-4b89-af9d-5a6241864946" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.720410] env[61440]: DEBUG oslo_concurrency.lockutils [None req-133a0b20-ffc8-48e8-a046-42046e7b99cd tempest-InstanceActionsV221TestJSON-2096217336 tempest-InstanceActionsV221TestJSON-2096217336-project-member] Acquiring lock "dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.721130] env[61440]: DEBUG oslo_concurrency.lockutils [None req-133a0b20-ffc8-48e8-a046-42046e7b99cd tempest-InstanceActionsV221TestJSON-2096217336 tempest-InstanceActionsV221TestJSON-2096217336-project-member] Lock "dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner 
[ 1015.721130] env[61440]: DEBUG oslo_concurrency.lockutils [None req-133a0b20-ffc8-48e8-a046-42046e7b99cd tempest-InstanceActionsV221TestJSON-2096217336 tempest-InstanceActionsV221TestJSON-2096217336-project-member] Lock "dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1036.275828] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1036.275828] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1036.275828] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 1039.274486] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1039.274796] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 1039.274796] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1039.297309] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.297704] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.297981] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.298323] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.298591] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.298851] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.299123] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.299389] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.300080] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.300080] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1039.300080] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 1039.300393] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1039.625076] env[61440]: WARNING oslo_vmware.rw_handles [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1039.625076] env[61440]: ERROR oslo_vmware.rw_handles
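Per the traceback above, the RemoteDisconnected is raised inside rw_handles.py's close() while it reads the final HTTP response; it is logged as a warning rather than a failure because the image data transfer itself had already finished, as the images.py entry that follows records. A minimal sketch of tolerating that benign close-time error (only http.client is real here; the wrapper function is hypothetical):

    import http.client

    def close_handle(conn):
        # Mirrors the pattern visible in the traceback above: the server may
        # drop the connection before sending a final response, so the read is
        # logged as a warning instead of failing the completed download.
        try:
            conn.getresponse()
        except http.client.RemoteDisconnected as exc:
            print("Error occurred while reading the HTTP response.:", exc)
        finally:
            conn.close()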
tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1039.627591] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1039.627852] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Copying Virtual Disk [datastore2] vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/9662b8f9-0452-496e-a83a-a94c5cad3521/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1039.628171] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8782024-8485-41b4-9532-e81f9d66a0f9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.635572] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 1039.635572] env[61440]: value = "task-4281300" [ 1039.635572] env[61440]: _type = "Task" [ 1039.635572] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.643572] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': task-4281300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.147036] env[61440]: DEBUG oslo_vmware.exceptions [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1040.147351] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.147907] env[61440]: ERROR nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1040.147907] env[61440]: Faults: ['InvalidArgument'] [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Traceback (most recent call last): [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] yield resources [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self.driver.spawn(context, instance, image_meta, [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self._fetch_image_if_missing(context, vi) [ 1040.147907] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] image_cache(vi, tmp_image_ds_loc) [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] vm_util.copy_virtual_disk( [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] session._wait_for_task(vmdk_copy_task) [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] return self.wait_for_task(task_ref) [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] return evt.wait() [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] result = hub.switch() [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1040.148317] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] return self.greenlet.switch() [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self.f(*self.args, **self.kw) [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] raise exceptions.translate_fault(task_info.error) [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Faults: ['InvalidArgument'] [ 1040.148721] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] [ 1040.148721] env[61440]: INFO nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Terminating instance [ 1040.149835] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.150110] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.150725] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 
tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1040.150934] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.151180] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5978828-281f-4baf-825e-0cae90cb2660 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.153719] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e2e564-7b8d-4ced-a0b2-40c1a3ec3000 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.160738] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1040.160972] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69da0663-5cde-4d56-9a0d-e22d29b19d4c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.163223] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.164044] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1040.164358] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d8cc777-f1c1-4a30-bbf5-eb97219a783d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.169831] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Waiting for the task: (returnval){ [ 1040.169831] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ba4805-1375-63c0-bf99-5a415f2a5de8" [ 1040.169831] env[61440]: _type = "Task" [ 1040.169831] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.177458] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ba4805-1375-63c0-bf99-5a415f2a5de8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.227374] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1040.227594] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1040.227945] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Deleting the datastore file [datastore2] f2e370a1-6644-4794-8c9c-0ac9d7a4c156 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.228045] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f690e492-30f5-4c2d-8f48-8be784c135f4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.234762] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 1040.234762] env[61440]: value = "task-4281302" [ 1040.234762] env[61440]: _type = "Task" [ 1040.234762] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.242429] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': task-4281302, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.274214] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.680392] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1040.680674] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Creating directory with path [datastore2] vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.680920] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96a13214-c607-447f-a55f-aaa9e1076e94 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.695787] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Created directory with path [datastore2] vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.695787] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Fetch image to [datastore2] vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1040.696021] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1040.697083] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3fcd74-1d55-46ad-a531-4324da1888ee {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.703929] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae23bb6-87db-4836-a216-100f4444ff0f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.712747] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664a6c76-c7fb-4aa2-8b9f-1388aac0d833 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.748140] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51c868f-d984-4674-b5f4-2188a3f67bc8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.755405] env[61440]: DEBUG oslo_vmware.api [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': task-4281302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079653} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.756607] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.756802] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1040.756976] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1040.757170] env[61440]: INFO nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Took 0.61 seconds to destroy the instance on the hypervisor. 
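
The delete sequence above is the standard oslo.vmware invoke-and-wait pattern: ds_util.file_delete starts FileManager.DeleteDatastoreFile_Task, and wait_for_task polls it (the task-4281302 "progress is 0%" line, then "completed successfully ... duration_secs: 0.079653") until vSphere reports success or a fault. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession and a Datacenter managed-object reference rather than Nova's actual ds_util helper:

    # Sketch: start a vSphere task and block on it, as in the log above.
    # `session` (oslo_vmware.api.VMwareAPISession) and `dc_ref` (a
    # Datacenter moref) are assumed to exist; this is not Nova's code.
    from oslo_vmware import exceptions as vmware_exc

    def delete_datastore_file(session, ds_path, dc_ref):
        file_manager = session.vim.service_content.fileManager
        # Returns a Task moref (the log shows e.g. value = "task-4281302").
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        try:
            # Polls the task the way _poll_task does above; on error it
            # raises a fault translated from task_info.error. Faults with
            # no specific mapping ("Fault InvalidArgument not matched.")
            # surface as VimFaultException with a fault_list attribute,
            # printed in the log as "Faults: ['InvalidArgument']".
            session.wait_for_task(task)
        except vmware_exc.FileNotFoundException:
            # File already gone; treat the delete as idempotent.
            pass

The InvalidArgument/fileType failure that aborts instance f2e370a1 earlier in this run is exactly this mechanism firing on CopyVirtualDisk_Task instead of a delete.
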
[ 1040.758946] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-44affe67-353e-4939-8908-95336cc03cb1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.760867] env[61440]: DEBUG nova.compute.claims [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1040.761052] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.761267] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.785612] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1040.851919] env[61440]: DEBUG oslo_vmware.rw_handles [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1040.912551] env[61440]: DEBUG oslo_vmware.rw_handles [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1040.915518] env[61440]: DEBUG oslo_vmware.rw_handles [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
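
That upload is a plain HTTP PUT against the ESX host's /folder endpoint, authenticated with the ticket from the SessionManager.AcquireGenericServiceTicket call logged just above; rw_handles is a thin wrapper over http.client, which is why the earlier RemoteDisconnected traceback ends inside self._conn.getresponse(). A rough, self-contained illustration of that write path follows (hypothetical host, cookie, and chunk iterator; not oslo.vmware's actual class):

    # Illustration of the rw_handles write path: stream image bytes to the
    # datastore /folder URL with a PUT. Host, cookie value, and the chunk
    # iterator are invented for the sketch.
    import http.client
    import ssl
    import urllib.parse

    def put_file_to_datastore(host, path, dc_path, ds_name, ticket_cookie,
                              chunks, size):
        # Build /folder/<path>?dcPath=...&dsName=... like the URL in the log.
        query = urllib.parse.urlencode({'dcPath': dc_path, 'dsName': ds_name})
        url = '/folder/%s?%s' % (urllib.parse.quote(path), query)
        # ESX hosts often present self-signed certs; verification is
        # deliberately disabled here, as a lab setup typically would.
        conn = http.client.HTTPSConnection(
            host, 443, context=ssl._create_unverified_context())
        conn.putrequest('PUT', url)
        conn.putheader('Cookie', ticket_cookie)   # generic service ticket
        conn.putheader('Content-Length', str(size))
        conn.putheader('Content-Type', 'application/octet-stream')
        conn.endheaders()
        for chunk in chunks:
            conn.send(chunk)   # the "reading data from the image iterator" phase
        # Closing the handle fetches the response; a host that drops the
        # socket at this point produces the RemoteDisconnected seen earlier.
        resp = conn.getresponse()
        resp.read()
        conn.close()
        return resp.status

The ?dcPath=ha-datacenter&dsName=datastore2 pair selects the datastore, and the path component is the vmware_temp/.../tmp-sparse.vmdk file the log shows being written.
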
[ 1041.275060] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.276078] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e5134c-f91b-4d0f-841b-d4b72b8c6976 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.280773] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.280773] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.286132] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10358b81-f7ac-4a6d-9570-1f925ec94493 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.319829] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2d79ec-1913-4fd2-b658-9e47e31a47e5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.325029] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.336273] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce160592-172f-4053-b187-f0b0ddc994a3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.351627] env[61440]: DEBUG nova.compute.provider_tree [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.365996] env[61440]: DEBUG nova.scheduler.client.report [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [
1041.395513] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.634s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.396050] env[61440]: ERROR nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1041.396050] env[61440]: Faults: ['InvalidArgument'] [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Traceback (most recent call last): [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self.driver.spawn(context, instance, image_meta, [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self._fetch_image_if_missing(context, vi) [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] image_cache(vi, tmp_image_ds_loc) [ 1041.396050] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] vm_util.copy_virtual_disk( [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] session._wait_for_task(vmdk_copy_task) [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] return self.wait_for_task(task_ref) [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1041.396380] env[61440]: ERROR 
nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] return evt.wait() [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] result = hub.switch() [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] return self.greenlet.switch() [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1041.396380] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] self.f(*self.args, **self.kw) [ 1041.396679] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1041.396679] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] raise exceptions.translate_fault(task_info.error) [ 1041.396679] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1041.396679] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Faults: ['InvalidArgument'] [ 1041.396679] env[61440]: ERROR nova.compute.manager [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] [ 1041.396801] env[61440]: DEBUG nova.compute.utils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1041.398050] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.074s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.398215] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.398373] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1041.399122] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Build of instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 
was re-scheduled: A specified parameter was not correct: fileType [ 1041.399122] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1041.399515] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1041.399687] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1041.399872] env[61440]: DEBUG nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1041.400010] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.404193] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ddee40-addd-422c-9b3e-18ed5b55c88d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.411438] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e40cd6e-2bc0-4d2c-a3d6-4222eb76940c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.433579] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61be14cf-62d0-4135-9ddb-43ac4ef47727 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.442284] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2da242-10e0-4594-a50e-d22c47b2915c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.472280] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180625MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1041.472435] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.472626] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.594672] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.594882] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.594941] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595064] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595195] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595317] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595432] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595546] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595659] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.595770] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1041.611602] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.631842] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.646406] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 177602d8-99ae-40df-ac3b-63374dde5715 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.661671] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.675117] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 01f72fa9-b392-4789-bc71-6339634efc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.712244] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b5c17233-358d-489d-8897-96cc38427164 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.725691] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 67e6bf8c-34cb-4918-9680-a707ffd09e35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.738101] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a14259c3-7af5-4d14-866a-48763fe2faaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.751138] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 51f670be-26a7-4248-a0b7-386968bed988 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.763901] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.785750] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d5e3b4fc-b970-4162-a8af-e40ed91f4575 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.808082] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b3054adf-d5b8-4c79-8ae3-ffb4deb745b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.823783] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 5d96681c-395d-4a15-a699-cf30a4d69827 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.841130] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 08e64faa-b841-4ee2-9fe7-a74a5b100b99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.861124] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 039393b3-b017-41b4-ab38-7675d72101d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.875686] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20fcd3fa-cc08-4b89-af9d-5a6241864946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.887166] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.887440] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1041.887607] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=183GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1042.291116] env[61440]: DEBUG nova.network.neutron [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.312429] env[61440]: INFO nova.compute.manager [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Took 0.91 seconds to deallocate network for instance. [ 1042.430720] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1644de-bf76-4804-8bb0-a72043ff918d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.441504] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7098ef8f-9001-4c23-91f7-db53023da1b4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.448711] env[61440]: INFO nova.scheduler.client.report [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Deleted allocations for instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 [ 1042.495254] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7fbf2274-7083-4492-9e9e-4a57f343c1da tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 485.009s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.496065] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9661397b-c93a-49d1-bccc-330ed86f24fa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.500014] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 84.153s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.500237] env[61440]: DEBUG 
oslo_concurrency.lockutils [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.500438] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.500609] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.505327] env[61440]: INFO nova.compute.manager [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Terminating instance [ 1042.512572] env[61440]: DEBUG nova.compute.manager [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1042.512674] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1042.514370] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb425330-ebd2-41c4-ab36-427a29e68000 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.519323] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bac6eded-b94d-4390-a20e-25c6e1e119b6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.533724] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.538635] env[61440]: DEBUG nova.compute.manager [None req-71f29705-800a-439b-9298-ea61a62528c3 tempest-ServerRescueTestJSONUnderV235-1738786825 tempest-ServerRescueTestJSONUnderV235-1738786825-project-member] [instance: f0ad8a9b-780b-4714-8a33-d92b922cb143] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.543111] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426b652d-8e09-4141-8ce2-3b8376f86d9c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.556800] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.585230] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2e370a1-6644-4794-8c9c-0ac9d7a4c156 could not be found. [ 1042.585485] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1042.585715] env[61440]: INFO nova.compute.manager [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1042.586049] env[61440]: DEBUG oslo.service.loopingcall [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.586733] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1042.586926] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.114s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.587444] env[61440]: DEBUG nova.compute.manager [-] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1042.587552] env[61440]: DEBUG nova.network.neutron [-] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1042.590836] env[61440]: DEBUG nova.compute.manager [None req-71f29705-800a-439b-9298-ea61a62528c3 tempest-ServerRescueTestJSONUnderV235-1738786825 tempest-ServerRescueTestJSONUnderV235-1738786825-project-member] [instance: f0ad8a9b-780b-4714-8a33-d92b922cb143] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.616779] env[61440]: DEBUG nova.network.neutron [-] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.618969] env[61440]: DEBUG oslo_concurrency.lockutils [None req-71f29705-800a-439b-9298-ea61a62528c3 tempest-ServerRescueTestJSONUnderV235-1738786825 tempest-ServerRescueTestJSONUnderV235-1738786825-project-member] Lock "f0ad8a9b-780b-4714-8a33-d92b922cb143" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.442s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.625718] env[61440]: INFO nova.compute.manager [-] [instance: f2e370a1-6644-4794-8c9c-0ac9d7a4c156] Took 0.04 seconds to deallocate network for instance. [ 1042.631033] env[61440]: DEBUG nova.compute.manager [None req-e7e0c30e-1cd1-4e60-a00d-f42c447ba2df tempest-ServerPasswordTestJSON-1978182123 tempest-ServerPasswordTestJSON-1978182123-project-member] [instance: 1a610510-68a0-45ca-aeae-c07b28f14e8d] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.661385] env[61440]: DEBUG nova.compute.manager [None req-e7e0c30e-1cd1-4e60-a00d-f42c447ba2df tempest-ServerPasswordTestJSON-1978182123 tempest-ServerPasswordTestJSON-1978182123-project-member] [instance: 1a610510-68a0-45ca-aeae-c07b28f14e8d] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.698076] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e7e0c30e-1cd1-4e60-a00d-f42c447ba2df tempest-ServerPasswordTestJSON-1978182123 tempest-ServerPasswordTestJSON-1978182123-project-member] Lock "1a610510-68a0-45ca-aeae-c07b28f14e8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.077s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.710675] env[61440]: DEBUG nova.compute.manager [None req-752a1a01-be36-40c0-8893-c4fe132c8ba2 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] [instance: 0fd3612a-5309-403b-b853-599e731667cd] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.748131] env[61440]: DEBUG nova.compute.manager [None req-752a1a01-be36-40c0-8893-c4fe132c8ba2 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] [instance: 0fd3612a-5309-403b-b853-599e731667cd] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.757939] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b409ad0-7dce-458c-b8ad-240112bb9b87 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "f2e370a1-6644-4794-8c9c-0ac9d7a4c156" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.258s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.777976] env[61440]: DEBUG oslo_concurrency.lockutils [None req-752a1a01-be36-40c0-8893-c4fe132c8ba2 tempest-ServersTestMultiNic-539005190 tempest-ServersTestMultiNic-539005190-project-member] Lock "0fd3612a-5309-403b-b853-599e731667cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.373s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.791596] env[61440]: DEBUG nova.compute.manager [None req-083612fe-e426-4e04-a493-7b26e8d86448 tempest-ServerShowV257Test-808608694 tempest-ServerShowV257Test-808608694-project-member] [instance: 108d2f3d-2f7e-4757-a78f-77ccc82d831a] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.822576] env[61440]: DEBUG nova.compute.manager [None req-083612fe-e426-4e04-a493-7b26e8d86448 tempest-ServerShowV257Test-808608694 tempest-ServerShowV257Test-808608694-project-member] [instance: 108d2f3d-2f7e-4757-a78f-77ccc82d831a] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.846732] env[61440]: DEBUG oslo_concurrency.lockutils [None req-083612fe-e426-4e04-a493-7b26e8d86448 tempest-ServerShowV257Test-808608694 tempest-ServerShowV257Test-808608694-project-member] Lock "108d2f3d-2f7e-4757-a78f-77ccc82d831a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.240s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.866615] env[61440]: DEBUG nova.compute.manager [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: a757fd04-c309-4b79-ab13-47b70b97b79c] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.897409] env[61440]: DEBUG nova.compute.manager [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: a757fd04-c309-4b79-ab13-47b70b97b79c] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.927026] env[61440]: DEBUG oslo_concurrency.lockutils [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "a757fd04-c309-4b79-ab13-47b70b97b79c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.462s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.940020] env[61440]: DEBUG nova.compute.manager [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: e4a748bc-83c2-451a-bed4-f3534a649731] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.973385] env[61440]: DEBUG nova.compute.manager [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: e4a748bc-83c2-451a-bed4-f3534a649731] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1043.003131] env[61440]: DEBUG oslo_concurrency.lockutils [None req-23c64fda-8c40-4dbf-822d-04ef37dbca78 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "e4a748bc-83c2-451a-bed4-f3534a649731" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.509s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.017362] env[61440]: DEBUG nova.compute.manager [None req-a8ca43b6-2ce5-4bf7-b54c-2098920532f5 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: d49a7f3b-ead2-4933-9d7f-3a80ce34e306] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1043.048920] env[61440]: DEBUG nova.compute.manager [None req-a8ca43b6-2ce5-4bf7-b54c-2098920532f5 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: d49a7f3b-ead2-4933-9d7f-3a80ce34e306] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1043.080031] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a8ca43b6-2ce5-4bf7-b54c-2098920532f5 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "d49a7f3b-ead2-4933-9d7f-3a80ce34e306" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.080s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.103204] env[61440]: DEBUG nova.compute.manager [None req-30a26ca6-0f67-4d45-a750-7d8371a43784 tempest-ServerActionsTestOtherB-1257657847 tempest-ServerActionsTestOtherB-1257657847-project-member] [instance: cb538de5-4247-4096-93c3-039ea0081985] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1043.132399] env[61440]: DEBUG nova.compute.manager [None req-30a26ca6-0f67-4d45-a750-7d8371a43784 tempest-ServerActionsTestOtherB-1257657847 tempest-ServerActionsTestOtherB-1257657847-project-member] [instance: cb538de5-4247-4096-93c3-039ea0081985] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1043.165996] env[61440]: DEBUG oslo_concurrency.lockutils [None req-30a26ca6-0f67-4d45-a750-7d8371a43784 tempest-ServerActionsTestOtherB-1257657847 tempest-ServerActionsTestOtherB-1257657847-project-member] Lock "cb538de5-4247-4096-93c3-039ea0081985" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.281s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.177549] env[61440]: DEBUG nova.compute.manager [None req-6478e7ff-d80a-41eb-8f71-f0e4ba52806a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: 9866ff2f-53c3-4ed2-865f-d418b0541025] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1043.229036] env[61440]: DEBUG nova.compute.manager [None req-6478e7ff-d80a-41eb-8f71-f0e4ba52806a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] [instance: 9866ff2f-53c3-4ed2-865f-d418b0541025] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1043.266114] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6478e7ff-d80a-41eb-8f71-f0e4ba52806a tempest-DeleteServersAdminTestJSON-1675151757 tempest-DeleteServersAdminTestJSON-1675151757-project-member] Lock "9866ff2f-53c3-4ed2-865f-d418b0541025" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.718s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.279568] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1043.357342] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.357608] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.359145] env[61440]: INFO nova.compute.claims [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.583310] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.902026] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7feafdf0-c03c-49d3-a8fa-cb249967a3bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.906471] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433ca42f-cbf9-4298-90a4-768fd0c4b280 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.937771] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85627b7-68cb-4bb8-98be-139972e85630 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.945231] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331cd49b-b58e-459b-b78a-5dcd317c9472 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.959894] env[61440]: DEBUG 
nova.compute.provider_tree [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.971971] env[61440]: DEBUG nova.scheduler.client.report [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1043.991155] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.631s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.991155] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1044.064368] env[61440]: DEBUG nova.compute.utils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.065890] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1044.066115] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1044.082733] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1044.151890] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1044.189885] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1044.189885] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1044.189885] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.190079] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1044.190079] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.190079] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1044.190079] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1044.190079] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1044.190542] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1044.190874] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1044.191211] env[61440]: DEBUG nova.virt.hardware [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1044.192471] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1c687c-4ddf-450f-b593-361855a74318 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.200922] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678c8385-96ae-4364-9b64-85b975dceaaf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.393645] env[61440]: DEBUG nova.policy [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233ce8b8def34cf199ed0c17f0ccc717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d28d1a3f10499eb02cac15f357ac32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1044.924507] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Successfully created port: 6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.016789] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c06cdf45-2158-4786-9319-66a32cfccec2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Acquiring lock "a925189b-1de9-4c1c-bdec-b10db97b85d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.016789] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c06cdf45-2158-4786-9319-66a32cfccec2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "a925189b-1de9-4c1c-bdec-b10db97b85d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.769112] env[61440]: DEBUG nova.compute.manager [req-0948a305-7143-4c93-a952-8b7f526cb161 req-cc943270-dfe6-476c-ba1f-247e8d4ded8a service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Received event network-vif-plugged-6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1045.769112] env[61440]: DEBUG oslo_concurrency.lockutils [req-0948a305-7143-4c93-a952-8b7f526cb161 req-cc943270-dfe6-476c-ba1f-247e8d4ded8a service nova] Acquiring lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.769112] env[61440]: DEBUG oslo_concurrency.lockutils [req-0948a305-7143-4c93-a952-8b7f526cb161 req-cc943270-dfe6-476c-ba1f-247e8d4ded8a service nova] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.769112] env[61440]: DEBUG oslo_concurrency.lockutils [req-0948a305-7143-4c93-a952-8b7f526cb161 req-cc943270-dfe6-476c-ba1f-247e8d4ded8a service nova] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.769386] env[61440]: DEBUG nova.compute.manager [req-0948a305-7143-4c93-a952-8b7f526cb161 req-cc943270-dfe6-476c-ba1f-247e8d4ded8a service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] No waiting events found dispatching network-vif-plugged-6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1045.769386] env[61440]: WARNING nova.compute.manager [req-0948a305-7143-4c93-a952-8b7f526cb161 req-cc943270-dfe6-476c-ba1f-247e8d4ded8a service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Received unexpected event network-vif-plugged-6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 for instance with vm_state building and task_state spawning. 
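[Editor's note] The lock triads threaded through the entries above all come from one oslo.concurrency pattern: the 'Acquiring lock "X" by "func"' / 'acquired ... :: waited' / '"released" ... :: held' lines (lockutils.py:402/407/421) are emitted by the synchronized decorator's inner wrapper, while the plain Acquiring/Acquired/Releasing lines for the refresh_cache and datastore image-cache locks (lockutils.py:310/313/331) come from the lock() context manager. A minimal sketch of both forms, assuming only that oslo.concurrency is installed; the lock names are copied from the log, but the function bodies are illustrative placeholders, not Nova's actual code:

    # Sketch of the oslo.concurrency locking pattern behind the DEBUG
    # triads in this log. Illustrative only; not Nova source.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f"  # from the log

    def clear_events_for_instance(events_by_instance):
        # lockutils.lock() is a context manager: entering it logs
        # "Acquiring lock"/"Acquired lock" and exiting logs
        # "Releasing lock" (lockutils.py:310/313/331 above).
        with lockutils.lock(f"{INSTANCE_UUID}-events"):
            return events_by_instance.pop(INSTANCE_UUID, [])

    # The "compute_resources" lock uses the decorator form, which produces
    # the ':: waited N s' / ':: held N s' lines (lockutils.py:402/407/421)
    # and serializes instance_claim() against the periodic
    # _update_available_resource() pass seen earlier in the log.
    @lockutils.synchronized("compute_resources")
    def instance_claim(node, vcpus, memory_mb):
        # Placeholder body: a real claim checks requested resources against
        # the inventory and allocation ratios reported for the provider.
        return {"node": node, "vcpus": vcpus, "memory_mb": memory_mb}

The WARNING just above ("Received unexpected event network-vif-plugged-...") is the benign race this per-instance event lock makes safe: Neutron's event arrived while the instance was still building, before anything had registered a waiter for it, so pop_instance_event found no waiting events and the event was dropped.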
[ 1045.897210] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Successfully updated port: 6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.911198] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "refresh_cache-16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.911771] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired lock "refresh_cache-16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.911771] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.970395] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1046.222415] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Updating instance_info_cache with network_info: [{"id": "6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2", "address": "fa:16:3e:22:b4:05", "network": {"id": "73467ae0-5655-4bd4-a32d-acdae0f79f52", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-287531447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9d28d1a3f10499eb02cac15f357ac32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d7bdec4-6f", "ovs_interfaceid": "6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.240029] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Releasing lock "refresh_cache-16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.240029] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance network_info: |[{"id": "6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2", "address": "fa:16:3e:22:b4:05", "network": {"id": "73467ae0-5655-4bd4-a32d-acdae0f79f52", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-287531447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9d28d1a3f10499eb02cac15f357ac32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d7bdec4-6f", "ovs_interfaceid": "6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1046.240335] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:b4:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15165046-2de9-4ada-9e99-0126e20854a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.256405] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating folder: Project (c9d28d1a3f10499eb02cac15f357ac32). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1046.257369] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45731d75-035b-40d5-89b9-134663c3566d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.272282] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Created folder: Project (c9d28d1a3f10499eb02cac15f357ac32) in parent group-v843372. [ 1046.272282] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating folder: Instances. Parent ref: group-v843436. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1046.272282] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b75e65f8-ea7f-4668-a5c1-c33be1aa0568 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.282821] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Created folder: Instances in parent group-v843436. [ 1046.283292] env[61440]: DEBUG oslo.service.loopingcall [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.283610] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1046.283933] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e260387c-c7cb-4f89-aff0-450d18092f10 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.304323] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.304323] env[61440]: value = "task-4281305" [ 1046.304323] env[61440]: _type = "Task" [ 1046.304323] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.313267] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281305, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.815051] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281305, 'name': CreateVM_Task, 'duration_secs': 0.310127} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.815161] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1046.815827] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.815994] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.816316] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.816565] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-188771c5-1975-4f67-9588-e5fc890fbbf6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.821166] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1046.821166] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]522df652-95e8-77f0-c20a-1abfe9e650ff" [ 1046.821166] env[61440]: _type = "Task" [ 1046.821166] env[61440]: } to 
complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.833810] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]522df652-95e8-77f0-c20a-1abfe9e650ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.331879] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.332220] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.332486] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.855762] env[61440]: DEBUG nova.compute.manager [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Received event network-changed-6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1047.856106] env[61440]: DEBUG nova.compute.manager [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Refreshing instance network info cache due to event network-changed-6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1047.856106] env[61440]: DEBUG oslo_concurrency.lockutils [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] Acquiring lock "refresh_cache-16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.856254] env[61440]: DEBUG oslo_concurrency.lockutils [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] Acquired lock "refresh_cache-16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.856483] env[61440]: DEBUG nova.network.neutron [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Refreshing network info cache for port 6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1048.232437] env[61440]: DEBUG nova.network.neutron [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Updated VIF entry in instance network info cache for port 6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1048.232806] env[61440]: DEBUG nova.network.neutron [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Updating instance_info_cache with network_info: [{"id": "6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2", "address": "fa:16:3e:22:b4:05", "network": {"id": "73467ae0-5655-4bd4-a32d-acdae0f79f52", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-287531447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9d28d1a3f10499eb02cac15f357ac32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d7bdec4-6f", "ovs_interfaceid": "6d7bdec4-6f80-4d0d-8f65-09faef0bf9c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.243585] env[61440]: DEBUG oslo_concurrency.lockutils [req-40c65824-8983-4d50-aaa3-a529e882bb78 req-f26ec240-4347-4d2e-8ce3-5987b78fd3d1 service nova] Releasing lock "refresh_cache-16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.191537] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 
tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.191831] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.368444] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6e015cf-eca5-42e4-9c87-140d5028c534 tempest-AttachInterfacesUnderV243Test-186038686 tempest-AttachInterfacesUnderV243Test-186038686-project-member] Acquiring lock "047b54df-a55e-4e18-87f7-835466d9581e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.368745] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6e015cf-eca5-42e4-9c87-140d5028c534 tempest-AttachInterfacesUnderV243Test-186038686 tempest-AttachInterfacesUnderV243Test-186038686-project-member] Lock "047b54df-a55e-4e18-87f7-835466d9581e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.084208] env[61440]: WARNING oslo_vmware.rw_handles [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1089.084208] env[61440]: ERROR oslo_vmware.rw_handles [ 1089.084846] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 
2d853a6e-4c2f-401e-9088-54e82bec1150] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1089.086771] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1089.087066] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Copying Virtual Disk [datastore2] vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/2e3fea82-1f6b-4e73-9163-5c3e2f3dc0ed/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1089.087391] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6231aefb-58f3-43e7-bf71-dff25477a401 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.095596] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Waiting for the task: (returnval){ [ 1089.095596] env[61440]: value = "task-4281306" [ 1089.095596] env[61440]: _type = "Task" [ 1089.095596] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.103692] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Task: {'id': task-4281306, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.605589] env[61440]: DEBUG oslo_vmware.exceptions [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1089.605868] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.606457] env[61440]: ERROR nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1089.606457] env[61440]: Faults: ['InvalidArgument'] [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Traceback (most recent call last): [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] yield resources [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self.driver.spawn(context, instance, image_meta, [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self._fetch_image_if_missing(context, vi) [ 1089.606457] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] image_cache(vi, tmp_image_ds_loc) [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] vm_util.copy_virtual_disk( [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] session._wait_for_task(vmdk_copy_task) [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] return self.wait_for_task(task_ref) [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] return evt.wait() [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] result = hub.switch() [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1089.606967] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] return self.greenlet.switch() [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self.f(*self.args, **self.kw) [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] raise exceptions.translate_fault(task_info.error) [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Faults: ['InvalidArgument'] [ 1089.607377] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] [ 1089.607377] env[61440]: INFO nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Terminating instance [ 1089.608835] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.609090] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.609719] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 
tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1089.609909] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1089.610146] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13942c80-d9bc-4024-a655-29d8096d1136 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.612340] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982beb58-25ed-43e5-b871-9f26f1113130 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.619926] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1089.620178] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c109b17-a339-4c01-ae88-0d6021f5dbbb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.622161] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.622337] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1089.623246] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c86bda72-5cf2-4edf-829b-f0e0328681ca {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.627551] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 1089.627551] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52bd202f-1f72-73fd-0f06-a135475435e7" [ 1089.627551] env[61440]: _type = "Task" [ 1089.627551] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.641701] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1089.641933] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating directory with path [datastore2] vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.642194] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ca3eb8d-7600-4b4e-9732-cdd8ba9393b9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.663157] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Created directory with path [datastore2] vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.663357] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Fetch image to [datastore2] vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1089.663531] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1089.664323] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6305c1af-0374-4ee1-baf0-8ca4659d4166 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.670901] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefdc53a-8fc9-49fd-8fd0-38124cc1fdf8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.679749] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1a6733-a07f-4570-9c3d-ea44a8b42b85 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.710289] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18c45ca-5422-4740-8cda-f93cd603aded {{(pid=61440) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.712815] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1089.713031] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1089.713212] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Deleting the datastore file [datastore2] 2d853a6e-4c2f-401e-9088-54e82bec1150 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.713439] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-889c9d7e-faba-4822-9299-b3d2dd994f69 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.718058] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9ac3d6cb-f14d-4c7f-a407-a42c6bf65c0a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.720794] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Waiting for the task: (returnval){ [ 1089.720794] env[61440]: value = "task-4281308" [ 1089.720794] env[61440]: _type = "Task" [ 1089.720794] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.728013] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Task: {'id': task-4281308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.740388] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1089.791197] env[61440]: DEBUG oslo_vmware.rw_handles [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1089.853935] env[61440]: DEBUG oslo_vmware.rw_handles [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1089.854249] env[61440]: DEBUG oslo_vmware.rw_handles [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1090.231108] env[61440]: DEBUG oslo_vmware.api [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Task: {'id': task-4281308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069518} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.231375] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.231573] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1090.231729] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1090.231904] env[61440]: INFO nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Took 0.62 seconds to destroy the instance on the hypervisor. 
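The failure above follows the standard oslo_vmware task pattern: each vSphere mutation (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a task reference that the driver polls via wait_for_task/_poll_task until vCenter reports success or error, and an error is re-raised as a fault-carrying exception, here a VimFaultException with Faults: ['InvalidArgument'] for the fileType parameter. A minimal Python sketch of that polling loop, assuming a get_task_info helper and a simplified exception class (illustrative stand-ins, not the actual oslo_vmware internals):

import time


class TaskFaultError(Exception):
    """Carries vSphere fault names alongside the message (cf. VimFaultException)."""

    def __init__(self, faults, message):
        super().__init__(message)
        self.faults = faults  # e.g. ['InvalidArgument']


def wait_for_task(get_task_info, task_ref, interval=0.5):
    """Poll a vSphere task reference until it leaves the running states."""
    while True:
        info = get_task_info(task_ref)  # assumed helper: returns task.info as a dict
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # Mirrors _poll_task raising translate_fault(task_info.error),
            # the step that produced the InvalidArgument traceback above.
            raise TaskFaultError(info.get('faults', []),
                                 info.get('message', 'task failed'))
        time.sleep(interval)  # the real driver reschedules via a looping call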
[ 1090.234029] env[61440]: DEBUG nova.compute.claims [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1090.234216] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.234443] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.611301] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5aa21f3-3682-4f1f-9c08-9a6c5d024e82 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.622981] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ef7664-b3cf-4f7b-85d6-4d55c88607af {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.648359] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43077c5-c314-4b98-9aad-5b7ba0f8b3f2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.655570] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fbdafb-b23e-4cb5-a65e-ad58a717c5bd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.669179] env[61440]: DEBUG nova.compute.provider_tree [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.677903] env[61440]: DEBUG nova.scheduler.client.report [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.691816] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.457s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.692369] env[61440]: ERROR nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1090.692369] env[61440]: Faults: ['InvalidArgument'] [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Traceback (most recent call last): [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self.driver.spawn(context, instance, image_meta, [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self._fetch_image_if_missing(context, vi) [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] image_cache(vi, tmp_image_ds_loc) [ 1090.692369] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] vm_util.copy_virtual_disk( [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] session._wait_for_task(vmdk_copy_task) [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] return self.wait_for_task(task_ref) [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 
2d853a6e-4c2f-401e-9088-54e82bec1150] return evt.wait() [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] result = hub.switch() [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] return self.greenlet.switch() [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1090.692762] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] self.f(*self.args, **self.kw) [ 1090.693139] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1090.693139] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] raise exceptions.translate_fault(task_info.error) [ 1090.693139] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1090.693139] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Faults: ['InvalidArgument'] [ 1090.693139] env[61440]: ERROR nova.compute.manager [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] [ 1090.693139] env[61440]: DEBUG nova.compute.utils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1090.694625] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Build of instance 2d853a6e-4c2f-401e-9088-54e82bec1150 was re-scheduled: A specified parameter was not correct: fileType [ 1090.694625] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1090.695046] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1090.695230] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1090.695390] env[61440]: DEBUG nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1090.695552] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.112464] env[61440]: DEBUG nova.network.neutron [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.129759] env[61440]: INFO nova.compute.manager [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Took 0.43 seconds to deallocate network for instance. [ 1091.234698] env[61440]: INFO nova.scheduler.client.report [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Deleted allocations for instance 2d853a6e-4c2f-401e-9088-54e82bec1150 [ 1091.254541] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5fe9ca3d-47cc-49f2-813d-80f9a9debbc5 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 532.852s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.256061] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 333.685s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.256429] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Acquiring lock "2d853a6e-4c2f-401e-9088-54e82bec1150-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.256664] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.256843] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.258740] env[61440]: INFO nova.compute.manager [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Terminating instance [ 1091.260455] env[61440]: DEBUG nova.compute.manager [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1091.260669] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.261324] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8eb5d292-1800-4328-b268-49882334bd40 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.265960] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1091.273013] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b97cfa2-3bf3-42a0-a98b-b7cf18363093 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.284716] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.284868] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances with incomplete migration {{(pid=61440) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1091.305648] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2d853a6e-4c2f-401e-9088-54e82bec1150 could not be found. [ 1091.305868] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1091.306382] env[61440]: INFO nova.compute.manager [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1091.306382] env[61440]: DEBUG oslo.service.loopingcall [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.307072] env[61440]: DEBUG nova.compute.manager [-] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1091.307072] env[61440]: DEBUG nova.network.neutron [-] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.336338] env[61440]: DEBUG nova.network.neutron [-] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.346080] env[61440]: INFO nova.compute.manager [-] [instance: 2d853a6e-4c2f-401e-9088-54e82bec1150] Took 0.04 seconds to deallocate network for instance. 
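The lock messages surrounding the claim ("Acquiring lock \"compute_resources\" ... waited 0.000s ... held 0.468s") come from oslo_concurrency's named-semaphore synchronization: the resource tracker serializes instance_claim and abort_instance_claim behind a single in-process lock so inventory updates cannot interleave. A hedged sketch of that pattern, where the claim bodies are placeholders rather than the real ResourceTracker logic:

from oslo_concurrency import lockutils


class ResourceTrackerSketch:
    """Illustrates the named-lock serialization visible in the log."""

    def __init__(self):
        self.claims = []

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid):
        # Reserve VCPU/MEMORY_MB/DISK_GB for the build; runs exclusively.
        self.claims.append(instance_uuid)

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(self, instance_uuid):
        # Release the reservation when spawn fails, as happened after the
        # InvalidArgument fault earlier in this log.
        self.claims.remove(instance_uuid)

Because the lock is keyed only by name, every claim and abort in the process queues on the same semaphore, which is why each log entry reports both the wait time and the hold time.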
[ 1091.351953] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.352201] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.353559] env[61440]: INFO nova.compute.claims [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.430139] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d20f05c9-6de3-41a1-b7f7-3a41cb445811 tempest-ServersAdminNegativeTestJSON-605078312 tempest-ServersAdminNegativeTestJSON-605078312-project-member] Lock "2d853a6e-4c2f-401e-9088-54e82bec1150" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.741302] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed3b434-9996-4cfc-80c9-35b386477a7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.748703] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb10af0f-39e0-4cc7-9eae-3c4ab6ac072b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.778341] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ea172d-54b7-4376-9bb6-183654a0059c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.785048] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfd0b2c-a3a2-4fdb-a5c1-e8ccc4c5f3e0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.797705] env[61440]: DEBUG nova.compute.provider_tree [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.807642] env[61440]: DEBUG nova.scheduler.client.report [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1091.820590] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.468s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.821069] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1091.851160] env[61440]: DEBUG nova.compute.utils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1091.852650] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1091.852650] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1091.865445] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1091.921468] env[61440]: DEBUG nova.policy [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233ce8b8def34cf199ed0c17f0ccc717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d28d1a3f10499eb02cac15f357ac32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1091.930792] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1091.956222] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1091.956529] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1091.956703] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.956898] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1091.957068] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1091.957227] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1091.957453] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1091.957618] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1091.957790] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1091.957957] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1091.958155] env[61440]: DEBUG nova.virt.hardware [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1091.959021] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce035526-c61d-41b3-bca9-18e84652f1f0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.967480] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5952de2-c118-4257-b75f-65f3448ab7e7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.272819] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Successfully created port: 60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.132776] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Successfully updated port: 60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.156075] env[61440]: DEBUG 
oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "refresh_cache-faf90964-1814-459f-89ef-0a27808077c1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.156288] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired lock "refresh_cache-faf90964-1814-459f-89ef-0a27808077c1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.156599] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.216966] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.237965] env[61440]: DEBUG nova.compute.manager [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Received event network-vif-plugged-60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1093.237965] env[61440]: DEBUG oslo_concurrency.lockutils [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] Acquiring lock "faf90964-1814-459f-89ef-0a27808077c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.237965] env[61440]: DEBUG oslo_concurrency.lockutils [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] Lock "faf90964-1814-459f-89ef-0a27808077c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.237965] env[61440]: DEBUG oslo_concurrency.lockutils [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] Lock "faf90964-1814-459f-89ef-0a27808077c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.238435] env[61440]: DEBUG nova.compute.manager [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] No waiting events found dispatching network-vif-plugged-60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1093.238435] env[61440]: WARNING nova.compute.manager [req-104fcf9c-b599-47b0-a0c4-ae081eface03 
req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Received unexpected event network-vif-plugged-60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 for instance with vm_state building and task_state spawning. [ 1093.238435] env[61440]: DEBUG nova.compute.manager [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Received event network-changed-60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1093.238435] env[61440]: DEBUG nova.compute.manager [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Refreshing instance network info cache due to event network-changed-60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1093.238435] env[61440]: DEBUG oslo_concurrency.lockutils [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] Acquiring lock "refresh_cache-faf90964-1814-459f-89ef-0a27808077c1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.406560] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Updating instance_info_cache with network_info: [{"id": "60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472", "address": "fa:16:3e:55:24:b3", "network": {"id": "73467ae0-5655-4bd4-a32d-acdae0f79f52", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-287531447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9d28d1a3f10499eb02cac15f357ac32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60b2f5cb-d2", "ovs_interfaceid": "60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.422086] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Releasing lock "refresh_cache-faf90964-1814-459f-89ef-0a27808077c1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.422472] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] 
Instance network_info: |[{"id": "60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472", "address": "fa:16:3e:55:24:b3", "network": {"id": "73467ae0-5655-4bd4-a32d-acdae0f79f52", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-287531447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9d28d1a3f10499eb02cac15f357ac32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60b2f5cb-d2", "ovs_interfaceid": "60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1093.422778] env[61440]: DEBUG oslo_concurrency.lockutils [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] Acquired lock "refresh_cache-faf90964-1814-459f-89ef-0a27808077c1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.422975] env[61440]: DEBUG nova.network.neutron [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Refreshing network info cache for port 60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1093.424631] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:24:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15165046-2de9-4ada-9e99-0126e20854a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.434460] env[61440]: DEBUG oslo.service.loopingcall [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.435203] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: faf90964-1814-459f-89ef-0a27808077c1] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1093.435669] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9d261f0-efd0-44ed-b6e8-ec1d541d52e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.457188] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.457188] env[61440]: value = "task-4281309" [ 1093.457188] env[61440]: _type = "Task" [ 1093.457188] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.466561] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281309, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.725503] env[61440]: DEBUG nova.network.neutron [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Updated VIF entry in instance network info cache for port 60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1093.725874] env[61440]: DEBUG nova.network.neutron [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] [instance: faf90964-1814-459f-89ef-0a27808077c1] Updating instance_info_cache with network_info: [{"id": "60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472", "address": "fa:16:3e:55:24:b3", "network": {"id": "73467ae0-5655-4bd4-a32d-acdae0f79f52", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-287531447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9d28d1a3f10499eb02cac15f357ac32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60b2f5cb-d2", "ovs_interfaceid": "60b2f5cb-d24b-4c8b-a7b3-ce17d26f3472", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.736890] env[61440]: DEBUG oslo_concurrency.lockutils [req-104fcf9c-b599-47b0-a0c4-ae081eface03 req-44391b98-4560-4263-bf47-7046c89aa5bb service nova] Releasing lock "refresh_cache-faf90964-1814-459f-89ef-0a27808077c1" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.966513] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281309, 'name': CreateVM_Task, 'duration_secs': 0.295562} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.967177] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: faf90964-1814-459f-89ef-0a27808077c1] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1093.967559] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.967727] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.968070] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1093.968317] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86ec48e7-0edf-43d1-a494-5f59094bd8da {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.972641] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1093.972641] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]525b1a9f-55db-1598-b115-60bc160d9663" [ 1093.972641] env[61440]: _type = "Task" [ 1093.972641] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.981193] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]525b1a9f-55db-1598-b115-60bc160d9663, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.483210] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.483547] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1094.483644] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.299913] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.299913] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1098.274364] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.274640] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.286878] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.287164] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1099.287203] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1099.310215] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.310411] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.310481] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.310608] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.310731] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.310855] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.310977] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.311118] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.311238] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.311359] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1099.311477] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1099.311969] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.274745] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.189200] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "269e724a-100e-4112-9c06-8a36871538ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.189563] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "269e724a-100e-4112-9c06-8a36871538ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.269928] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.271385] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.296960] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.297634] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.436123] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "faf90964-1814-459f-89ef-0a27808077c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.274361] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.286368] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.286599] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.286766] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.286955] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1103.288100] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c2e37b-da0a-4cac-b683-7a51c1f6b142 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.299033] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b42bbe-1f04-4c0e-a244-6c5508ffa379 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.312084] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659b9896-72b6-490f-a500-520e13d417ec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.318347] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797b75f6-3607-4b86-9ece-0022a2d6a494 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.347349] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180644MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1103.347489] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.347682] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.499801] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 3395aaef-0db6-4fab-b8a5-79b781129690 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.499801] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501050] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501050] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501050] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501050] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501236] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501236] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501236] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.501802] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.514675] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.525591] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 01f72fa9-b392-4789-bc71-6339634efc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.546579] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b5c17233-358d-489d-8897-96cc38427164 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.563038] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 67e6bf8c-34cb-4918-9680-a707ffd09e35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.573153] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a14259c3-7af5-4d14-866a-48763fe2faaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.582886] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 51f670be-26a7-4248-a0b7-386968bed988 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.592636] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.602356] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d5e3b4fc-b970-4162-a8af-e40ed91f4575 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.611734] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b3054adf-d5b8-4c79-8ae3-ffb4deb745b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.620975] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 5d96681c-395d-4a15-a699-cf30a4d69827 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.630715] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 08e64faa-b841-4ee2-9fe7-a74a5b100b99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.640825] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 039393b3-b017-41b4-ab38-7675d72101d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.649983] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20fcd3fa-cc08-4b89-af9d-5a6241864946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.288656] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.300020] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a925189b-1de9-4c1c-bdec-b10db97b85d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.311180] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.320480] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 047b54df-a55e-4e18-87f7-835466d9581e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.331056] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.331296] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1104.331441] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1104.350260] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing inventories for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1104.365507] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating ProviderTree inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1104.365691] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1104.376761] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing aggregate associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, aggregates: None {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1104.394763] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing trait associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1104.694712] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7473eeb6-5a65-468d-83f5-c6561f75395b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.701915] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-581c64e3-6e4d-4033-ae7d-335c03c3a290 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.730240] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f55eeaf-2f03-4124-baee-0dcbc01501ad {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.736848] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88517e94-17c7-45ff-a024-214de4a89d27 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.749116] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.757592] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1104.775723] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1104.775916] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.428s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.275042] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.275042] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1105.285023] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] There are 0 instances to clean {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1139.454163] env[61440]: WARNING oslo_vmware.rw_handles [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1139.454163] env[61440]: ERROR oslo_vmware.rw_handles [ 1139.454821] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1139.456994] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1139.457266] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Copying Virtual Disk [datastore2] vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/3aad6b20-c004-476c-a250-e1e986f8d24f/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1139.457576] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8409a1ec-c1be-47e2-89a4-025bee0353a3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.466009] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 1139.466009] env[61440]: value = "task-4281310" [ 1139.466009] env[61440]: _type = "Task" [ 1139.466009] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.474221] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': task-4281310, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.976589] env[61440]: DEBUG oslo_vmware.exceptions [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1139.976891] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.977453] env[61440]: ERROR nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1139.977453] env[61440]: Faults: ['InvalidArgument'] [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Traceback (most recent call last): [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] yield resources [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self.driver.spawn(context, instance, image_meta, [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self._fetch_image_if_missing(context, vi) [ 1139.977453] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] image_cache(vi, tmp_image_ds_loc) [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] vm_util.copy_virtual_disk( [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] session._wait_for_task(vmdk_copy_task) [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] return self.wait_for_task(task_ref) [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] return evt.wait() [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] result = hub.switch() [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1139.977933] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] return self.greenlet.switch() [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self.f(*self.args, **self.kw) [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] raise exceptions.translate_fault(task_info.error) [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Faults: ['InvalidArgument'] [ 1139.978322] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] [ 1139.978322] env[61440]: INFO nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Terminating instance [ 1139.979349] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.979563] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 
tempest-AttachInterfacesV270Test-403380279-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.979806] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cecb5771-d5da-43dc-983f-f56b6f0c5066 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.983309] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1139.983309] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1139.983928] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fa4f1e-750b-4567-914a-aa1b50fd21de {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.987585] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.987758] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1139.988752] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-407a023d-594f-4b56-a7eb-f811116bb64c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.992635] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1139.993153] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4890799-c4fc-4ecc-82f9-e4f8f7e24d29 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.995609] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Waiting for the task: (returnval){ [ 1139.995609] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520df2f4-6270-1718-c290-d3465983ca06" [ 1139.995609] env[61440]: _type = "Task" [ 1139.995609] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.003050] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520df2f4-6270-1718-c290-d3465983ca06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.061420] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1140.061635] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1140.061811] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Deleting the datastore file [datastore2] 3395aaef-0db6-4fab-b8a5-79b781129690 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.062084] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59e4a89d-da83-461d-ba63-c18bdd24e57f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.067619] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for the task: (returnval){ [ 1140.067619] env[61440]: value = "task-4281312" [ 1140.067619] env[61440]: _type = "Task" [ 1140.067619] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.075388] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': task-4281312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.506379] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1140.506632] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Creating directory with path [datastore2] vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.506883] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c903e0eb-d7db-409c-97eb-e1cb4e96cb18 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.547760] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Created directory with path [datastore2] vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.547969] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Fetch image to [datastore2] vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1140.548158] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1140.548904] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3ea513-ebdd-4357-99f6-71ea8c871ba6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.555541] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437c1cb2-6814-43be-a995-02fde55a1b78 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.564203] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b01ea9-4ad7-4719-a291-e1737614fcae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.595030] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6935e988-c698-4cfe-9ebd-96f7334c0b77 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.599898] env[61440]: DEBUG oslo_vmware.api [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Task: {'id': task-4281312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076842} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.600426] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1140.600607] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1140.600773] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1140.600942] env[61440]: INFO nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Took 0.62 seconds to destroy the instance on the hypervisor. 
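The DeleteDatastoreFile_Task entries above follow the standard oslo.vmware task pattern: invoke a vSphere method that returns a task moref, then block in wait_for_task(), which polls Task.info on an interval (the "progress is 0%" lines) and either returns on success or raises the translated fault. A minimal sketch of that pattern, assuming an already-configured oslo_vmware.api.VMwareAPISession; the helper name and its arguments are illustrative, not Nova's actual code:

from oslo_vmware import exceptions as vexc


def delete_datastore_file(session, datacenter_ref, ds_path):
    """Delete a datastore file and block until the task completes."""
    file_manager = session.vim.service_content.fileManager
    # Returns a task moref immediately; the deletion runs server-side.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    try:
        # Polls the task (the _poll_task lines above) and raises a
        # translated exception, e.g. VimFaultException, on task error.
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # An already-missing file can reasonably be treated as deleted.
        pass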
[ 1140.604079] env[61440]: DEBUG nova.compute.claims [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1140.604249] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.604456] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.607282] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-de6dfa72-a21d-4629-bda7-1bb12240cb8e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.630502] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1140.754507] env[61440]: DEBUG oslo_vmware.rw_handles [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1140.814244] env[61440]: DEBUG oslo_vmware.rw_handles [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1140.814446] env[61440]: DEBUG oslo_vmware.rw_handles [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1141.103431] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71060589-ba1e-41e4-a67a-baaae982b651 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.111085] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fa55c6-163a-4f20-b443-47a69f8ac3a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.140511] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c91d6ac-b49b-4883-a202-3f56af9873fb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.147080] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9851d453-0ea5-4f9f-9a36-5e7f8632fb15 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.159841] env[61440]: DEBUG nova.compute.provider_tree [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.169027] env[61440]: DEBUG nova.scheduler.client.report [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1141.184538] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.580s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.185333] env[61440]: ERROR nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1141.185333] env[61440]: Faults: ['InvalidArgument'] [ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Traceback (most recent call last): [ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1141.185333] env[61440]: ERROR 
nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self.driver.spawn(context, instance, image_meta,
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self._fetch_image_if_missing(context, vi)
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] image_cache(vi, tmp_image_ds_loc)
[ 1141.185333] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] vm_util.copy_virtual_disk(
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] session._wait_for_task(vmdk_copy_task)
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] return self.wait_for_task(task_ref)
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] return evt.wait()
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] result = hub.switch()
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] return self.greenlet.switch()
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1141.185739] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] self.f(*self.args, **self.kw)
[ 1141.186220] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] File
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1141.186220] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] raise exceptions.translate_fault(task_info.error) [ 1141.186220] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1141.186220] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Faults: ['InvalidArgument'] [ 1141.186220] env[61440]: ERROR nova.compute.manager [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] [ 1141.186220] env[61440]: DEBUG nova.compute.utils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1141.187524] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Build of instance 3395aaef-0db6-4fab-b8a5-79b781129690 was re-scheduled: A specified parameter was not correct: fileType [ 1141.187524] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1141.187993] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1141.188201] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1141.188377] env[61440]: DEBUG nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1141.188542] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.533166] env[61440]: DEBUG nova.network.neutron [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.547444] env[61440]: INFO nova.compute.manager [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Took 0.36 seconds to deallocate network for instance. [ 1141.638829] env[61440]: INFO nova.scheduler.client.report [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Deleted allocations for instance 3395aaef-0db6-4fab-b8a5-79b781129690 [ 1141.660622] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0135495a-6a7a-448e-b543-5fd2cbba6b72 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "3395aaef-0db6-4fab-b8a5-79b781129690" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 579.402s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.661807] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "3395aaef-0db6-4fab-b8a5-79b781129690" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 379.730s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.662108] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Acquiring lock "3395aaef-0db6-4fab-b8a5-79b781129690-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.662398] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "3395aaef-0db6-4fab-b8a5-79b781129690-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.662763] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "3395aaef-0db6-4fab-b8a5-79b781129690-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.666148] env[61440]: INFO nova.compute.manager [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Terminating instance [ 1141.666746] env[61440]: DEBUG nova.compute.manager [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1141.666848] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1141.667368] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09e7da7e-0626-4265-91e0-9dbaebe1416a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.677008] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972e1f17-a3d7-4d75-8681-52bf767b5a21 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.688869] env[61440]: DEBUG nova.compute.manager [None req-f8f68622-fb90-45e4-99ab-d90adb4c66c4 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 177602d8-99ae-40df-ac3b-63374dde5715] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1141.710911] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3395aaef-0db6-4fab-b8a5-79b781129690 could not be found. [ 1141.710911] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1141.710911] env[61440]: INFO nova.compute.manager [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Took 0.04 seconds to destroy the instance on the hypervisor. 
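The paired "Acquiring lock ... / Lock ... acquired ... waited / ... released ... held" lines throughout this section are emitted by oslo.concurrency's lockutils wrapper, which serializes callers on a named semaphore and logs how long each caller waited for and then held it. A short sketch of the two usage forms behind those messages; the lock names mirror the log, but the function names and bodies are placeholders:

from oslo_concurrency import lockutils


# Decorator form: every caller sharing the name is serialized, and the
# wrapper (the "inner" frames in the log) logs waited/held durations.
@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance):
    ...  # resource-tracker bookkeeping runs under the lock


# Context-manager form, as used for the per-instance "-events" and
# "refresh_cache-<uuid>" locks above.
def clear_events_for_instance(instance_uuid):
    with lockutils.lock('%s-events' % instance_uuid):
        ...  # mutate the instance's pending-event table while locked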
[ 1141.710911] env[61440]: DEBUG oslo.service.loopingcall [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.711135] env[61440]: DEBUG nova.compute.manager [-] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1141.711201] env[61440]: DEBUG nova.network.neutron [-] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.716863] env[61440]: DEBUG nova.compute.manager [None req-f8f68622-fb90-45e4-99ab-d90adb4c66c4 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 177602d8-99ae-40df-ac3b-63374dde5715] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1141.736147] env[61440]: DEBUG nova.network.neutron [-] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.739071] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f8f68622-fb90-45e4-99ab-d90adb4c66c4 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "177602d8-99ae-40df-ac3b-63374dde5715" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.220s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.744402] env[61440]: INFO nova.compute.manager [-] [instance: 3395aaef-0db6-4fab-b8a5-79b781129690] Took 0.03 seconds to deallocate network for instance. [ 1141.750537] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1141.804018] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.804018] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.804018] env[61440]: INFO nova.compute.claims [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.841885] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e82124b5-a1e9-4808-82cb-53f0ca765f26 tempest-ServersAdminTestJSON-1241047433 tempest-ServersAdminTestJSON-1241047433-project-member] Lock "3395aaef-0db6-4fab-b8a5-79b781129690" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.149148] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c26071-4202-4b28-ac41-5d02df7e519f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.156577] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddebcaa8-6d98-4404-95ae-8ed23039165b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.184901] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b5a7e9-3fca-489d-afc8-624b7dccf997 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.191758] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da73bf41-778e-4cdb-ad98-3e0884c6217b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.203961] env[61440]: DEBUG nova.compute.provider_tree [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.213025] env[61440]: DEBUG nova.scheduler.client.report [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1142.227563] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.426s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.228015] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1142.262816] env[61440]: DEBUG nova.compute.utils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1142.264648] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1142.264825] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1142.273060] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1142.320791] env[61440]: DEBUG nova.policy [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a265c56480374729bfd01371371c9794', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2851ef119a794c5993d1d8ff98eaf249', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1142.339218] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1142.373547] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1142.373784] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1142.373944] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.374143] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1142.374292] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.374439] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1142.374686] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1142.374874] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1142.375066] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1142.375262] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1142.375438] env[61440]: DEBUG nova.virt.hardware [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1142.376540] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bc4403-4325-4e9f-9571-8647e1320530 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.384553] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bf2eb3-8dc2-4b43-848c-56e838681b07 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.750932] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Successfully created port: d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1143.605940] env[61440]: DEBUG nova.compute.manager [req-888aa0e6-d3aa-4e5c-93f9-e8df37951492 req-3a08e010-9a2a-416d-b9e6-367a4ef4fd00 service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Received event network-vif-plugged-d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 
1143.606178] env[61440]: DEBUG oslo_concurrency.lockutils [req-888aa0e6-d3aa-4e5c-93f9-e8df37951492 req-3a08e010-9a2a-416d-b9e6-367a4ef4fd00 service nova] Acquiring lock "726c5ed6-d706-4886-a2c1-fc666a527662-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.606386] env[61440]: DEBUG oslo_concurrency.lockutils [req-888aa0e6-d3aa-4e5c-93f9-e8df37951492 req-3a08e010-9a2a-416d-b9e6-367a4ef4fd00 service nova] Lock "726c5ed6-d706-4886-a2c1-fc666a527662-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.606555] env[61440]: DEBUG oslo_concurrency.lockutils [req-888aa0e6-d3aa-4e5c-93f9-e8df37951492 req-3a08e010-9a2a-416d-b9e6-367a4ef4fd00 service nova] Lock "726c5ed6-d706-4886-a2c1-fc666a527662-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.606724] env[61440]: DEBUG nova.compute.manager [req-888aa0e6-d3aa-4e5c-93f9-e8df37951492 req-3a08e010-9a2a-416d-b9e6-367a4ef4fd00 service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] No waiting events found dispatching network-vif-plugged-d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1143.606909] env[61440]: WARNING nova.compute.manager [req-888aa0e6-d3aa-4e5c-93f9-e8df37951492 req-3a08e010-9a2a-416d-b9e6-367a4ef4fd00 service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Received unexpected event network-vif-plugged-d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 for instance with vm_state building and task_state spawning. 
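The sequence above shows Neutron's external event path: the network-vif-plugged notification for port d39cdf9f arrives while the instance is still building, the compute manager looks for a registered waiter under the instance's "-events" lock, finds none, and logs the "Received unexpected event" warning instead of failing. A simplified stand-in for that pop-or-warn dispatch (schematic only, not Nova's actual implementation):

import threading
from collections import defaultdict

_waiters = defaultdict(dict)   # instance_uuid -> {event_key: Event}
_lock = threading.Lock()       # plays the role of the "-events" lock


def prepare_for_event(instance_uuid, event_key):
    """Register interest in an event before triggering it externally."""
    ev = threading.Event()
    with _lock:
        _waiters[instance_uuid][event_key] = ev
    return ev


def deliver_event(instance_uuid, event_key):
    """Deliver an external event; True if a waiter consumed it."""
    with _lock:
        ev = _waiters[instance_uuid].pop(event_key, None)
    if ev is None:
        # No one was waiting: the "unexpected event" case in the log.
        return False
    ev.set()
    return True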
[ 1143.706811] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Successfully updated port: d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1143.723309] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "refresh_cache-726c5ed6-d706-4886-a2c1-fc666a527662" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.723468] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired lock "refresh_cache-726c5ed6-d706-4886-a2c1-fc666a527662" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.723642] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1143.767745] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1143.947814] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Updating instance_info_cache with network_info: [{"id": "d39cdf9f-5a66-4ccc-a5e8-8389e2f04348", "address": "fa:16:3e:2d:ce:24", "network": {"id": "d5127eee-6df8-4d15-ab3a-d2702fa2d058", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1444475798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2851ef119a794c5993d1d8ff98eaf249", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c24464bb-bb6b-43a2-bdcd-8086ad1a307f", "external-id": "nsx-vlan-transportzone-781", "segmentation_id": 781, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd39cdf9f-5a", "ovs_interfaceid": "d39cdf9f-5a66-4ccc-a5e8-8389e2f04348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.958670] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Releasing lock "refresh_cache-726c5ed6-d706-4886-a2c1-fc666a527662" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.958969] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance network_info: |[{"id": "d39cdf9f-5a66-4ccc-a5e8-8389e2f04348", "address": "fa:16:3e:2d:ce:24", "network": {"id": "d5127eee-6df8-4d15-ab3a-d2702fa2d058", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1444475798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2851ef119a794c5993d1d8ff98eaf249", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c24464bb-bb6b-43a2-bdcd-8086ad1a307f", "external-id": "nsx-vlan-transportzone-781", "segmentation_id": 781, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd39cdf9f-5a", "ovs_interfaceid": "d39cdf9f-5a66-4ccc-a5e8-8389e2f04348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1143.959395] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:ce:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c24464bb-bb6b-43a2-bdcd-8086ad1a307f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd39cdf9f-5a66-4ccc-a5e8-8389e2f04348', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1143.967122] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating folder: Project (2851ef119a794c5993d1d8ff98eaf249). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1143.967739] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6186c82-ee18-4fe1-96ec-9a428a557209 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.978454] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Created folder: Project (2851ef119a794c5993d1d8ff98eaf249) in parent group-v843372. [ 1143.978650] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating folder: Instances. Parent ref: group-v843440. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1143.978884] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74cf6aba-5f3e-4f62-ba2d-f1bca795abfc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.988297] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Created folder: Instances in parent group-v843440. [ 1143.988616] env[61440]: DEBUG oslo.service.loopingcall [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1143.988860] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1143.989080] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a84c4c7c-17e1-4041-ab38-601f2f2b71da {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.009104] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.009104] env[61440]: value = "task-4281315" [ 1144.009104] env[61440]: _type = "Task" [ 1144.009104] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.017304] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281315, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.517339] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281315, 'name': CreateVM_Task, 'duration_secs': 0.271132} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.517339] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1144.517931] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.518111] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.518422] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1144.518738] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a343e3-0cb7-4268-9f1a-d14ae892c685 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.523075] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1144.523075] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f5c2df-c5e7-8228-4490-37aca50d9108" [ 1144.523075] env[61440]: _type = "Task" [ 1144.523075] env[61440]: } to 
complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.531502] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f5c2df-c5e7-8228-4490-37aca50d9108, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.032831] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.033192] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1145.033306] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.573393] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_power_states {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.603766] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Getting list of instances from cluster (obj){ [ 1145.603766] env[61440]: value = "domain-c8" [ 1145.603766] env[61440]: _type = "ClusterComputeResource" [ 1145.603766] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1145.605177] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026fbef1-31e3-4c7a-be02-62a090a47e92 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.624374] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Got total of 10 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1145.624619] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 1438771e-fd84-4dac-81b1-c2df19972ebe {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1145.624877] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid f152a563-2988-4fac-9974-af25e17f14d1 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1145.626273] env[61440]: DEBUG 
nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 63fdeef4-93e6-408c-9b37-33bf3532a6e8 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.626637] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 23b7562f-035c-487f-a1f2-279b69ca4355 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.626850] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid b8a27ad2-4cc5-4219-9bc3-5735433b153c {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.627056] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.627253] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 6765defd-cd4d-49e2-a734-7b3cccca8bbd {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.628435] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.628435] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid faf90964-1814-459f-89ef-0a27808077c1 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.628435] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 726c5ed6-d706-4886-a2c1-fc666a527662 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}}
[ 1145.628435] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "1438771e-fd84-4dac-81b1-c2df19972ebe" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.628435] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "f152a563-2988-4fac-9974-af25e17f14d1" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.628679] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.629153] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "23b7562f-035c-487f-a1f2-279b69ca4355" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.629153] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.629340] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.629579] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.629817] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.630058] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "faf90964-1814-459f-89ef-0a27808077c1" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.630297] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "726c5ed6-d706-4886-a2c1-fc666a527662" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1145.767774] env[61440]: DEBUG nova.compute.manager [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Received event network-changed-d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1145.767976] env[61440]: DEBUG nova.compute.manager [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Refreshing instance network info cache due to event network-changed-d39cdf9f-5a66-4ccc-a5e8-8389e2f04348.
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1145.768772] env[61440]: DEBUG oslo_concurrency.lockutils [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] Acquiring lock "refresh_cache-726c5ed6-d706-4886-a2c1-fc666a527662" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.768772] env[61440]: DEBUG oslo_concurrency.lockutils [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] Acquired lock "refresh_cache-726c5ed6-d706-4886-a2c1-fc666a527662" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.768772] env[61440]: DEBUG nova.network.neutron [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Refreshing network info cache for port d39cdf9f-5a66-4ccc-a5e8-8389e2f04348 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1146.266868] env[61440]: DEBUG nova.network.neutron [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Updated VIF entry in instance network info cache for port d39cdf9f-5a66-4ccc-a5e8-8389e2f04348. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1146.267297] env[61440]: DEBUG nova.network.neutron [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Updating instance_info_cache with network_info: [{"id": "d39cdf9f-5a66-4ccc-a5e8-8389e2f04348", "address": "fa:16:3e:2d:ce:24", "network": {"id": "d5127eee-6df8-4d15-ab3a-d2702fa2d058", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1444475798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2851ef119a794c5993d1d8ff98eaf249", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c24464bb-bb6b-43a2-bdcd-8086ad1a307f", "external-id": "nsx-vlan-transportzone-781", "segmentation_id": 781, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd39cdf9f-5a", "ovs_interfaceid": "d39cdf9f-5a66-4ccc-a5e8-8389e2f04348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.278403] env[61440]: DEBUG oslo_concurrency.lockutils [req-96ae50b8-c2bc-4e70-84d0-5734ec432388 req-72cd829f-e52e-4b14-a05d-239a39a57afe service nova] Releasing lock "refresh_cache-726c5ed6-d706-4886-a2c1-fc666a527662" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.798459] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 
tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "726c5ed6-d706-4886-a2c1-fc666a527662" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1151.118265] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "608ac5c2-3518-4da0-992f-a752584165a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1151.118265] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "608ac5c2-3518-4da0-992f-a752584165a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1151.991673] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8a0e764b-2905-4c50-8d16-2201720fd429 tempest-ServerActionsTestOtherA-1315040339 tempest-ServerActionsTestOtherA-1315040339-project-member] Acquiring lock "0244475d-98ff-4801-a648-6728f85171ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1151.991925] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8a0e764b-2905-4c50-8d16-2201720fd429 tempest-ServerActionsTestOtherA-1315040339 tempest-ServerActionsTestOtherA-1315040339-project-member] Lock "0244475d-98ff-4801-a648-6728f85171ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1159.275463] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1159.275463] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1160.274493] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.274740] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.274926] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.275721] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.275925] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1161.275925] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1161.299639] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.299969] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300167] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300338] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300474] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300602] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300723] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300845] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.300962] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.301220] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1161.301351] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1162.273805] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.273989] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.274262] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.274592] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.286226] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.286433] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.286615] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.286769] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1164.287875] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7ae372-9c13-41d0-b2ec-6ae242bbf114 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.296494] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a7ee02-fdeb-4af0-b75f-4bc65397c697 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.309835] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9182da-fb82-4949-9cb7-d1d21a6c2684 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.315968] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6288feae-efc8-4e18-b959-67145740847f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.343407] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180663MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1164.343645] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.343718] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.426278] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 1438771e-fd84-4dac-81b1-c2df19972ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.426441] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.426567] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.426690] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.426808] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.426925] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.427061] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.427186] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.427303] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.427416] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1164.442941] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a14259c3-7af5-4d14-866a-48763fe2faaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.453084] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 51f670be-26a7-4248-a0b7-386968bed988 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.463850] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.473373] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance d5e3b4fc-b970-4162-a8af-e40ed91f4575 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.482868] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b3054adf-d5b8-4c79-8ae3-ffb4deb745b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.493040] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 5d96681c-395d-4a15-a699-cf30a4d69827 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.502619] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 08e64faa-b841-4ee2-9fe7-a74a5b100b99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.511748] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 039393b3-b017-41b4-ab38-7675d72101d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.520980] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 20fcd3fa-cc08-4b89-af9d-5a6241864946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.530471] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.540853] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a925189b-1de9-4c1c-bdec-b10db97b85d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.551564] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.566230] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 047b54df-a55e-4e18-87f7-835466d9581e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.576955] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.587042] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.597789] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0244475d-98ff-4801-a648-6728f85171ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.598033] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1164.598187] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1164.884855] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4368d5-16ee-4e68-b0ce-860707295004 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.892141] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37077912-0f10-4864-b97c-eef1a023500b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.921402] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d88f48-b9d6-4d32-8a72-e76153f9ed58 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.928167] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e9f2ba-39f0-4e7b-ab6d-bec03daefd5b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.941161] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.949793] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1164.964295] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1164.964482] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.621s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.470581] env[61440]: WARNING oslo_vmware.rw_handles [None 
req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1189.470581] env[61440]: ERROR oslo_vmware.rw_handles [ 1189.471309] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1189.472858] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1189.473149] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Copying Virtual Disk [datastore2] vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/f68c551e-d799-4224-822d-3c46d3f08c35/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1189.473472] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75b000bb-f5ea-497e-803f-206807d34e02 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.481767] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Waiting for the task: (returnval){ [ 1189.481767] env[61440]: value = "task-4281316" [ 1189.481767] env[61440]: _type = "Task" [ 1189.481767] env[61440]: } to 
complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.489358] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Task: {'id': task-4281316, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.997510] env[61440]: DEBUG oslo_vmware.exceptions [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1189.997510] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.997510] env[61440]: ERROR nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1189.997510] env[61440]: Faults: ['InvalidArgument'] [ 1189.997510] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Traceback (most recent call last): [ 1189.997510] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1189.997510] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] yield resources [ 1189.997510] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1189.997510] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self.driver.spawn(context, instance, image_meta, [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self._fetch_image_if_missing(context, vi) [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] image_cache(vi, tmp_image_ds_loc) [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 
1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] vm_util.copy_virtual_disk( [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] session._wait_for_task(vmdk_copy_task) [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] return self.wait_for_task(task_ref) [ 1189.997977] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] return evt.wait() [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] result = hub.switch() [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] return self.greenlet.switch() [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self.f(*self.args, **self.kw) [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] raise exceptions.translate_fault(task_info.error) [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Faults: ['InvalidArgument'] [ 1189.998449] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] [ 1189.998958] env[61440]: INFO nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Terminating instance [ 1189.998958] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.999135] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.999412] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-192f2395-1a17-4b73-8bc3-9144ad26cf8d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.003889] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1190.004126] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1190.004872] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b318eed-ba3f-4b90-bfda-2ddae77caa5b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.012246] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1190.013432] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcee299f-6879-46aa-8c05-9bf053212662 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.015114] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.015383] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1190.016076] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa168f3-925f-4553-a0f4-6b2fd9ffb2fa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.021047] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Waiting for the task: (returnval){ [ 1190.021047] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526f4c98-103f-914d-56cb-68ecb4dac535" [ 1190.021047] env[61440]: _type = "Task" [ 1190.021047] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.028677] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526f4c98-103f-914d-56cb-68ecb4dac535, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.257665] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1190.257924] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1190.258116] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Deleting the datastore file [datastore2] 1438771e-fd84-4dac-81b1-c2df19972ebe {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1190.258389] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-750bd3db-f2a9-4a10-9518-c0688268e9d1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.265275] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Waiting for the task: (returnval){ [ 1190.265275] env[61440]: value = "task-4281318" [ 1190.265275] env[61440]: _type = "Task" [ 1190.265275] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.272383] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Task: {'id': task-4281318, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.532015] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1190.532015] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Creating directory with path [datastore2] vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.532015] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c26205b-8246-4865-8fb3-eb9acdfbd5b5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.548112] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Created directory with path [datastore2] vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.548310] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Fetch image to [datastore2] vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1190.548495] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1190.549287] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1aa19ef-1522-4cf3-9311-19f411251073 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.556481] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4110519a-50fe-4adf-a7a6-b2e5a7476e08 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.565788] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3446ec-dd37-4808-8d46-db3c1162c87d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.598249] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87657557-3b5e-4fda-97b4-8dbf606c9b43 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.603834] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f5710a82-17e3-44e9-b24d-460ab3069daa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.625054] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1190.778929] env[61440]: DEBUG oslo_vmware.api [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Task: {'id': task-4281318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078194} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.778929] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.779171] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1190.779374] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1190.779640] env[61440]: INFO nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Took 0.78 seconds to destroy the instance on the hypervisor. 
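
Annotation on the task polling that produces the "Waiting for the task", "progress is 0%" and "completed successfully" records above (task-4281316, task-4281318): a minimal sketch of the pattern, assuming only oslo.vmware's public session API. The endpoint, credentials, and datastore path below are hypothetical placeholders, not values from this log; Nova itself reaches the same code through its own wrappers (nova.virt.vmwareapi.ds_util.file_delete and session._wait_for_task) rather than calling it like this.

    # Minimal sketch, assuming oslo.vmware's public API; every concrete
    # value here is a hypothetical placeholder.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.test',      # hypothetical vCenter endpoint
        'svc-nova', 'secret',        # hypothetical credentials
        api_retry_count=10,
        task_poll_interval=0.5)      # seconds between poll rounds

    # DeleteDatastoreFile_Task returns a Task managed-object reference;
    # wait_for_task() polls it, logging progress records like the ones
    # in this trace, and raises a translated exception such as
    # VimFaultException if the task ends in an error state.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] 00000000-0000-0000-0000-000000000000',
        datacenter=None)             # a Datacenter ref is required on vCenter
    task_info = session.wait_for_task(task)
    print(task_info.state)           # 'success' once polling completes

The same wait loop is what turned the InvalidArgument fault on the earlier CopyVirtualDisk_Task into the VimFaultException raised from _poll_task and re-raised through nova.compute.manager in the traceback above.
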
[ 1190.782362] env[61440]: DEBUG nova.compute.claims [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1190.782613] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.782915] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.927920] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1190.990160] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1190.990304] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1191.270589] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b201e5f7-435c-4d3e-8328-d7e81551269d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.278306] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fc0c6c-f93e-43d6-a752-aa1c4c3f2126 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.308049] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fae2c3-8d73-4e23-925d-53ee360dae88 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.315427] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce3a46a-a1e1-48fb-aa28-19541e5d85e6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.328234] env[61440]: DEBUG nova.compute.provider_tree [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.336369] env[61440]: DEBUG nova.scheduler.client.report [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1191.358339] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.575s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.358863] env[61440]: ERROR nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1191.358863] env[61440]: Faults: ['InvalidArgument'] [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Traceback (most recent call last): [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1191.358863] 
env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self.driver.spawn(context, instance, image_meta, [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self._fetch_image_if_missing(context, vi) [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] image_cache(vi, tmp_image_ds_loc) [ 1191.358863] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] vm_util.copy_virtual_disk( [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] session._wait_for_task(vmdk_copy_task) [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] return self.wait_for_task(task_ref) [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] return evt.wait() [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] result = hub.switch() [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] return self.greenlet.switch() [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1191.359305] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] self.f(*self.args, **self.kw) [ 1191.359738] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1191.359738] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] raise exceptions.translate_fault(task_info.error) [ 1191.359738] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1191.359738] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Faults: ['InvalidArgument'] [ 1191.359738] env[61440]: ERROR nova.compute.manager [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] [ 1191.359738] env[61440]: DEBUG nova.compute.utils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1191.361039] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Build of instance 1438771e-fd84-4dac-81b1-c2df19972ebe was re-scheduled: A specified parameter was not correct: fileType [ 1191.361039] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1191.361423] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1191.361599] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1191.361789] env[61440]: DEBUG nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1191.361952] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1192.024219] env[61440]: DEBUG nova.network.neutron [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.071573] env[61440]: INFO nova.compute.manager [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Took 0.71 seconds to deallocate network for instance. [ 1192.241091] env[61440]: INFO nova.scheduler.client.report [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Deleted allocations for instance 1438771e-fd84-4dac-81b1-c2df19972ebe [ 1192.278465] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fd6ab872-6b05-4348-972a-2e1d48540008 tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.465s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.279660] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 430.804s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.279886] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Acquiring lock "1438771e-fd84-4dac-81b1-c2df19972ebe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.280114] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.280286] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.282537] env[61440]: INFO nova.compute.manager [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Terminating instance [ 1192.285412] env[61440]: DEBUG nova.compute.manager [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1192.288543] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1192.288543] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d591bab0-f30a-42c3-ab0d-3dd6b6d48b4a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.295779] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcdf06c6-2d38-419e-b8e9-92c817743820 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.306826] env[61440]: DEBUG nova.compute.manager [None req-6b0e8daa-5362-4527-b675-f341701fd618 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 01f72fa9-b392-4789-bc71-6339634efc28] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.327904] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1438771e-fd84-4dac-81b1-c2df19972ebe could not be found. 
[ 1192.328113] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1192.328298] env[61440]: INFO nova.compute.manager [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1192.328544] env[61440]: DEBUG oslo.service.loopingcall [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1192.328753] env[61440]: DEBUG nova.compute.manager [-] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1192.328846] env[61440]: DEBUG nova.network.neutron [-] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1192.334229] env[61440]: DEBUG nova.compute.manager [None req-6b0e8daa-5362-4527-b675-f341701fd618 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 01f72fa9-b392-4789-bc71-6339634efc28] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.358933] env[61440]: DEBUG nova.network.neutron [-] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.368144] env[61440]: INFO nova.compute.manager [-] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] Took 0.04 seconds to deallocate network for instance. [ 1192.376535] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b0e8daa-5362-4527-b675-f341701fd618 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "01f72fa9-b392-4789-bc71-6339634efc28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.736s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.389958] env[61440]: DEBUG nova.compute.manager [None req-1bd2ce90-e5c2-4a64-9a6b-1070b3330706 tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] [instance: b5c17233-358d-489d-8897-96cc38427164] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.420158] env[61440]: DEBUG nova.compute.manager [None req-1bd2ce90-e5c2-4a64-9a6b-1070b3330706 tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] [instance: b5c17233-358d-489d-8897-96cc38427164] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.449529] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1bd2ce90-e5c2-4a64-9a6b-1070b3330706 tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] Lock "b5c17233-358d-489d-8897-96cc38427164" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.770s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.463340] env[61440]: DEBUG nova.compute.manager [None req-f391f9a5-a54e-4b81-8460-69790bd30b1a tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] [instance: 67e6bf8c-34cb-4918-9680-a707ffd09e35] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.517727] env[61440]: DEBUG nova.compute.manager [None req-f391f9a5-a54e-4b81-8460-69790bd30b1a tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] [instance: 67e6bf8c-34cb-4918-9680-a707ffd09e35] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.547566] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c65cd2d6-c063-4297-8e68-70ffcad2868a tempest-AttachInterfacesV270Test-403380279 tempest-AttachInterfacesV270Test-403380279-project-member] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.268s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.548720] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 46.921s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.548948] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 1438771e-fd84-4dac-81b1-c2df19972ebe] During sync_power_state the instance has a pending task (deleting). Skip. [ 1192.549168] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "1438771e-fd84-4dac-81b1-c2df19972ebe" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.569674] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f391f9a5-a54e-4b81-8460-69790bd30b1a tempest-ServerShowV247Test-1593778620 tempest-ServerShowV247Test-1593778620-project-member] Lock "67e6bf8c-34cb-4918-9680-a707ffd09e35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.463s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.591713] env[61440]: DEBUG nova.compute.manager [None req-d8812c4a-da42-4ca1-9b8a-afecb9679d9f tempest-ServerActionsV293TestJSON-573375651 tempest-ServerActionsV293TestJSON-573375651-project-member] [instance: a14259c3-7af5-4d14-866a-48763fe2faaf] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.615711] env[61440]: DEBUG nova.compute.manager [None req-d8812c4a-da42-4ca1-9b8a-afecb9679d9f tempest-ServerActionsV293TestJSON-573375651 tempest-ServerActionsV293TestJSON-573375651-project-member] [instance: a14259c3-7af5-4d14-866a-48763fe2faaf] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.957207] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d8812c4a-da42-4ca1-9b8a-afecb9679d9f tempest-ServerActionsV293TestJSON-573375651 tempest-ServerActionsV293TestJSON-573375651-project-member] Lock "a14259c3-7af5-4d14-866a-48763fe2faaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.606s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.303742] env[61440]: DEBUG nova.compute.manager [None req-fa5411aa-b459-4707-b734-80d679e20f0a tempest-ServerAddressesTestJSON-756750789 tempest-ServerAddressesTestJSON-756750789-project-member] [instance: 51f670be-26a7-4248-a0b7-386968bed988] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1193.327520] env[61440]: DEBUG nova.compute.manager [None req-fa5411aa-b459-4707-b734-80d679e20f0a tempest-ServerAddressesTestJSON-756750789 tempest-ServerAddressesTestJSON-756750789-project-member] [instance: 51f670be-26a7-4248-a0b7-386968bed988] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1193.359429] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fa5411aa-b459-4707-b734-80d679e20f0a tempest-ServerAddressesTestJSON-756750789 tempest-ServerAddressesTestJSON-756750789-project-member] Lock "51f670be-26a7-4248-a0b7-386968bed988" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.204s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.372362] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1193.465392] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.465677] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.467197] env[61440]: INFO nova.compute.claims [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1193.862725] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644c1d32-1618-4329-93af-9c8f6cea17ed {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.869855] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6bf7a2-691a-47f5-b444-d716dadb1c7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.899403] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41910c8f-abfa-4a9e-af10-38d5d9b5b892 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.906630] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49db25a1-002d-462e-bc8d-fd671145143e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.925075] env[61440]: DEBUG nova.compute.provider_tree [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.934782] env[61440]: DEBUG nova.scheduler.client.report [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1193.974261] 
env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.508s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.974946] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1194.028732] env[61440]: DEBUG nova.compute.utils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1194.030082] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1194.030268] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1194.048108] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1194.153349] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1194.157282] env[61440]: DEBUG nova.policy [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be6e550783b541708ef77649a1a49bee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '159802197c81449f9138052109213f52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1194.201014] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1194.201307] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1194.201485] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.201679] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1194.201812] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.201959] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1194.202186] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1194.202375] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1194.202579] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1194.202752] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1194.202929] env[61440]: DEBUG nova.virt.hardware [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1194.203812] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8277b4c0-b7ee-4eb9-b15f-f6df08e347f8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.212109] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8edb85-f212-4beb-a0f2-b87e42c19234 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.361910] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "fd9b654a-0651-46ae-a7c9-30743b875e2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.764850] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Successfully created port: 72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1195.504187] env[61440]: DEBUG nova.compute.manager [req-2dc79df3-809b-4d40-966a-6cd59b82c4a9 req-173963c6-3e45-4319-bba0-96fa10a41f1b service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Received event network-vif-plugged-72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1195.504446] env[61440]: DEBUG oslo_concurrency.lockutils [req-2dc79df3-809b-4d40-966a-6cd59b82c4a9 req-173963c6-3e45-4319-bba0-96fa10a41f1b service nova] Acquiring lock "fd9b654a-0651-46ae-a7c9-30743b875e2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.504640] env[61440]: DEBUG oslo_concurrency.lockutils [req-2dc79df3-809b-4d40-966a-6cd59b82c4a9 req-173963c6-3e45-4319-bba0-96fa10a41f1b service nova] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.504817] env[61440]: DEBUG oslo_concurrency.lockutils [req-2dc79df3-809b-4d40-966a-6cd59b82c4a9 req-173963c6-3e45-4319-bba0-96fa10a41f1b service nova] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.504984] env[61440]: DEBUG nova.compute.manager [req-2dc79df3-809b-4d40-966a-6cd59b82c4a9 req-173963c6-3e45-4319-bba0-96fa10a41f1b service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] No waiting events found dispatching network-vif-plugged-72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1195.505593] env[61440]: WARNING nova.compute.manager [req-2dc79df3-809b-4d40-966a-6cd59b82c4a9 req-173963c6-3e45-4319-bba0-96fa10a41f1b service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Received unexpected event network-vif-plugged-72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 for instance with vm_state building and task_state deleting. 
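The req-2dc79df3 entries above show Neutron's external-event handshake arriving too late to matter: network-vif-plugged-72fc2e0d is delivered while the instance is already in task_state deleting, no waiter has registered for it, and it is logged as unexpected. A self-contained sketch of the register/pop pattern behind those entries, with stand-in names rather than Nova's real API:

    import logging
    import threading

    LOG = logging.getLogger(__name__)

    _events = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        # Called by the build path before plugging VIFs.
        ev = threading.Event()
        _events[(instance_uuid, event_name)] = ev
        return ev

    def external_instance_event(instance_uuid, event_name):
        # Called when the Neutron notification arrives.
        ev = _events.pop((instance_uuid, event_name), None)
        if ev is None:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)
        else:
            ev.set()  # wakes the waiter blocked in the build path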
[ 1195.863909] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Successfully updated port: 72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1195.955851] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.956009] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquired lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.956222] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1196.006343] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1196.402041] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Updating instance_info_cache with network_info: [{"id": "72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7", "address": "fa:16:3e:32:d5:2a", "network": {"id": "276d6f95-c4d8-4dea-aa27-60ca980312dc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1079747817-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "159802197c81449f9138052109213f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72fc2e0d-41", "ovs_interfaceid": "72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.414132] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Releasing lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.414410] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance network_info: |[{"id": "72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7", "address": "fa:16:3e:32:d5:2a", "network": {"id": "276d6f95-c4d8-4dea-aa27-60ca980312dc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1079747817-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "159802197c81449f9138052109213f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72fc2e0d-41", "ovs_interfaceid": "72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1196.414812] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:d5:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.422185] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Creating folder: Project (159802197c81449f9138052109213f52). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1196.422711] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d049c992-4588-49c7-93c8-64811cc53623 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.433765] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Created folder: Project (159802197c81449f9138052109213f52) in parent group-v843372. [ 1196.433952] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Creating folder: Instances. Parent ref: group-v843443. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1196.434259] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-647d630b-953b-4988-9bd3-e9cec78fa619 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.443582] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Created folder: Instances in parent group-v843443. [ 1196.443863] env[61440]: DEBUG oslo.service.loopingcall [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.443990] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1196.444199] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edcb3481-b903-47b6-a22a-1a3a76336c37 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.463941] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.463941] env[61440]: value = "task-4281321" [ 1196.463941] env[61440]: _type = "Task" [ 1196.463941] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.471015] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281321, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.973186] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281321, 'name': CreateVM_Task, 'duration_secs': 0.29398} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.973459] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1196.981989] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.982184] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.982505] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1196.982754] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0a7a35d-4532-42e9-ac22-b30fa8857833 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.987678] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Waiting for the task: (returnval){ [ 1196.987678] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52b8ecfe-e082-54ea-53b7-c9bca672e5bc" [ 1196.987678] env[61440]: _type = "Task" [ 1196.987678] 
env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.994451] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52b8ecfe-e082-54ea-53b7-c9bca672e5bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.498497] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.498497] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.498708] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.723553] env[61440]: DEBUG nova.compute.manager [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Received event network-changed-72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1197.723724] env[61440]: DEBUG nova.compute.manager [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Refreshing instance network info cache due to event network-changed-72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1197.723939] env[61440]: DEBUG oslo_concurrency.lockutils [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] Acquiring lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.724112] env[61440]: DEBUG oslo_concurrency.lockutils [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] Acquired lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.724292] env[61440]: DEBUG nova.network.neutron [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Refreshing network info cache for port 72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1198.240282] env[61440]: DEBUG nova.network.neutron [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Updated VIF entry in instance network info cache for port 72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1198.240652] env[61440]: DEBUG nova.network.neutron [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Updating instance_info_cache with network_info: [{"id": "72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7", "address": "fa:16:3e:32:d5:2a", "network": {"id": "276d6f95-c4d8-4dea-aa27-60ca980312dc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1079747817-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "159802197c81449f9138052109213f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72fc2e0d-41", "ovs_interfaceid": "72fc2e0d-415a-456f-8a5d-e0ad8dc2f6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.252915] env[61440]: DEBUG oslo_concurrency.lockutils [req-514cce6a-97fb-4d04-8810-ac661a7d3115 req-c335dfb5-64a3-4931-8ad5-e86017b011b8 service nova] Releasing lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.825341] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 
tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "2486ea17-09bd-410d-a96d-bc863c3354e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.825618] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.001538] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7664a823-e632-41a5-b7af-82426e039ac1 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] Acquiring lock "9349b760-746f-40fa-998d-fdcb325431b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.001823] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7664a823-e632-41a5-b7af-82426e039ac1 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] Lock "9349b760-746f-40fa-998d-fdcb325431b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.865575] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a674334e-fe2c-42a7-9fa5-3ecafb0b1734 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "97f08952-2a93-4e0c-9e46-31fc421a0291" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.865909] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a674334e-fe2c-42a7-9fa5-3ecafb0b1734 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "97f08952-2a93-4e0c-9e46-31fc421a0291" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.821098] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8c9e0cd0-d5ee-4c33-af73-fd79bcc3d0ba tempest-ServerMetadataNegativeTestJSON-1986415539 tempest-ServerMetadataNegativeTestJSON-1986415539-project-member] Acquiring lock "e0872184-53cf-46b9-826d-f48c83506911" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.821480] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8c9e0cd0-d5ee-4c33-af73-fd79bcc3d0ba tempest-ServerMetadataNegativeTestJSON-1986415539 tempest-ServerMetadataNegativeTestJSON-1986415539-project-member] Lock "e0872184-53cf-46b9-826d-f48c83506911" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
:: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.802785] env[61440]: DEBUG oslo_concurrency.lockutils [None req-39d3dc05-4c01-459b-898b-036e8d7486ff tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "607c5b5e-84b8-458d-a430-5171095922f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.803108] env[61440]: DEBUG oslo_concurrency.lockutils [None req-39d3dc05-4c01-459b-898b-036e8d7486ff tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "607c5b5e-84b8-458d-a430-5171095922f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.964378] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.964666] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1220.274622] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.274042] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.275191] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.275547] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1222.275547] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1222.298890] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.299095] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. 
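[editor's note] The repeated "Acquiring lock ... / Lock ... acquired ... waited 0.000s / Lock ... released ... held Ns" triplets come from oslo.concurrency's lockutils, which Nova uses to serialize _locked_do_build_and_run_instance per instance UUID and to guard the resource tracker's "compute_resources" sections. A minimal sketch of both forms, assuming oslo.concurrency is installed; the lock names mirror identifiers from the records above:

```python
from oslo_concurrency import lockutils

# Serialize build work per instance UUID, as in the
# "_locked_do_build_and_run_instance" records above.
@lockutils.synchronized('e0872184-53cf-46b9-826d-f48c83506911')
def locked_do_build_and_run_instance():
    pass  # build steps run while the lock is held

# The same semantics as a context manager, resource-tracker style.
with lockutils.lock('compute_resources'):
    pass  # critical section; acquire/release is what the DEBUG records trace
```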
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.299271] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.299429] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.299581] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.299731] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.299886] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.300062] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.300219] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.300350] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1222.300471] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
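[editor's note] The "Running periodic task ComputeManager._..." records are emitted by oslo.service's periodic task machinery, which drives housekeeping such as _heal_instance_info_cache and _poll_unconfirmed_resizes on fixed spacings. A small sketch of that mechanism, assuming oslo.service and oslo.config are available; the class and method here are toy stand-ins, not Nova's ComputeManager:

```python
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    """Toy stand-in for ComputeManager's periodic housekeeping."""

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        # Corresponds to "Starting heal instance info cache" /
        # "Rebuilding the list of instances to heal" above.
        print('Starting heal instance info cache')

mgr = Manager(cfg.CONF)
# Normally invoked repeatedly by a looping call in the service framework.
mgr.run_periodic_tasks(context=None)
```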
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1222.300962] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.274785] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.269657] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.269947] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.275155] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.275472] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.286543] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.286755] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.286940] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.287130] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1226.288236] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f01457-a6dd-495e-b9aa-4a13cbc81f9e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.297207] env[61440]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d4f501-de29-4dd0-9fe6-0aeba5e5aabf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.313336] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f18e668-17c7-4f86-bcc0-32eac3a1d6cd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.320051] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10130190-de70-41f6-b85b-3b374fe3013a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.348220] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180673MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1226.348369] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.348560] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.423942] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f152a563-2988-4fac-9974-af25e17f14d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424129] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424264] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424390] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424512] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424632] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424750] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424869] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.424986] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.425118] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.437401] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a925189b-1de9-4c1c-bdec-b10db97b85d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.449028] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.459671] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 047b54df-a55e-4e18-87f7-835466d9581e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.471881] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.482017] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.492697] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0244475d-98ff-4801-a648-6728f85171ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.504147] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.515789] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9349b760-746f-40fa-998d-fdcb325431b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.527989] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 97f08952-2a93-4e0c-9e46-31fc421a0291 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
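[editor's note] Each audited instance above carries an identical placement allocation ({'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}), and summing ten of them plus the 512 MB reserved RAM from the inventory reproduces the final resource view's figures exactly: used_ram=1792MB (512 + 10 x 128), used_disk=10GB, used_vcpus=10. A short check of that bookkeeping, with the numbers taken from the records:

```python
# Sum per-instance placement allocations into the "Final resource view".
allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
reserved = {'VCPU': 0, 'MEMORY_MB': 512, 'DISK_GB': 0}  # from the inventory data

used = {rc: reserved[rc] + sum(a[rc] for a in allocations)
        for rc in ('VCPU', 'MEMORY_MB', 'DISK_GB')}
assert used == {'VCPU': 10, 'MEMORY_MB': 1792, 'DISK_GB': 10}
```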
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.538125] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e0872184-53cf-46b9-826d-f48c83506911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.546801] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 607c5b5e-84b8-458d-a430-5171095922f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.547319] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1226.547620] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1226.781558] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa08a6ae-053a-41ea-ad50-34649649c60f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.788953] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db4ea29-b0e8-4806-877a-60185611909e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.819349] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e927a25d-9315-4a4d-adfb-1a0a7e42c72b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.826368] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d316986-c762-40ed-a904-7b509aa434f7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.839125] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.849028] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1226.863190] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1226.863385] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.515s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.325301] env[61440]: WARNING oslo_vmware.rw_handles [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1238.325301] env[61440]: ERROR oslo_vmware.rw_handles [ 1238.325861] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1238.327400] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1238.327641] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a962be6c-b161-44f2-b881-71dbe041218d 
tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Copying Virtual Disk [datastore2] vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/8bc0a6d9-2e3a-4be2-8147-fb4277235cf6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1238.327927] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-769ec315-9150-4811-88db-997689c0c40b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.337453] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Waiting for the task: (returnval){ [ 1238.337453] env[61440]: value = "task-4281322" [ 1238.337453] env[61440]: _type = "Task" [ 1238.337453] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.344847] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Task: {'id': task-4281322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.848078] env[61440]: DEBUG oslo_vmware.exceptions [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Fault InvalidArgument not matched. 
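[editor's note] The "Fault InvalidArgument not matched" record comes from oslo.vmware's get_fault_class, which maps VIM fault names to specific exception classes and falls back to a generic VimFaultException when, as here, no class is registered for the fault. A rough sketch of that lookup; the registry contents below are illustrative, not oslo.vmware's actual table:

```python
class VimFaultException(Exception):
    """Generic fallback carrying the raw VIM fault names."""
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list

class FileNotFoundException(VimFaultException):
    pass

# Illustrative name -> class registry; oslo.vmware keeps a similar mapping.
_FAULT_CLASSES = {'FileNotFound': FileNotFoundException}

def translate_fault(fault_name, msg):
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:
        # The "Fault InvalidArgument not matched" case: fall back to the
        # generic exception, which is what the traceback above then raises.
        cls = VimFaultException
    return cls([fault_name], msg)

exc = translate_fault('InvalidArgument',
                      'A specified parameter was not correct: fileType')
```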
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1238.848078] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.848363] env[61440]: ERROR nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1238.848363] env[61440]: Faults: ['InvalidArgument'] [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] Traceback (most recent call last): [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] yield resources [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self.driver.spawn(context, instance, image_meta, [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self._fetch_image_if_missing(context, vi) [ 1238.848363] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] image_cache(vi, tmp_image_ds_loc) [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] vm_util.copy_virtual_disk( [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] session._wait_for_task(vmdk_copy_task) [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] return self.wait_for_task(task_ref) [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] return evt.wait() [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] result = hub.switch() [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1238.848614] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] return self.greenlet.switch() [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self.f(*self.args, **self.kw) [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] raise exceptions.translate_fault(task_info.error) [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] Faults: ['InvalidArgument'] [ 1238.848859] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] [ 1238.848859] env[61440]: INFO nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Terminating instance [ 1238.850263] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.850471] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1238.850705] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c9ca5de0-6d2f-4565-a711-6ad833add650 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.853028] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1238.853227] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1238.853910] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0b91d2-aea3-46f4-989d-372d24aca5c6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.860414] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1238.860616] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4bccbf8-51d9-4a6f-a2cf-766cd8e7df3e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.862620] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1238.862790] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1238.863717] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d229b50-0629-4995-8cb5-e35b0e3700ad {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.868284] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Waiting for the task: (returnval){ [ 1238.868284] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5255d7a8-e5ab-d3ed-6c4d-b56643a58e92" [ 1238.868284] env[61440]: _type = "Task" [ 1238.868284] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.877853] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5255d7a8-e5ab-d3ed-6c4d-b56643a58e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.923880] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1238.924172] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1238.924314] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Deleting the datastore file [datastore2] f152a563-2988-4fac-9974-af25e17f14d1 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.924576] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dd17a0d-ed5f-4513-8f2e-dce0d972c68f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.930674] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Waiting for the task: (returnval){ [ 1238.930674] env[61440]: value = "task-4281324" [ 1238.930674] env[61440]: _type = "Task" [ 1238.930674] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.939299] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Task: {'id': task-4281324, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.378683] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1239.378984] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Creating directory with path [datastore2] vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.379186] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6e30a6a-e332-46e4-bcd4-b17fd6370d86 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.390485] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Created directory with path [datastore2] vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.390687] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Fetch image to [datastore2] vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1239.390860] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1239.392311] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8c8410-674a-4390-98c4-3a89cf309273 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.399240] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ecd2e6-365b-4268-9892-b17b0560568a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.408331] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67332f83-3ef7-4c0b-8f11-4103b8a968ca {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.441365] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4e06db31-5974-4c0e-a77e-ec4a65703700 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.448481] env[61440]: DEBUG oslo_vmware.api [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Task: {'id': task-4281324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068421} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.449962] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1239.450175] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1239.450357] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1239.450535] env[61440]: INFO nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Took 0.60 seconds to destroy the instance on the hypervisor. 
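[editor's note] The teardown above follows a fixed order: UnregisterVM, then DeleteDatastoreFile_Task on the instance directory, then an INFO record reporting how long the hypervisor-side destroy took. A compressed sketch of that sequence under stated assumptions; `invoke_api` and `wait_for_task` are hypothetical helpers standing in for the driver's session plumbing:

```python
import time

def destroy_instance(session, vm_ref, ds_path):
    """Mirror of the UnregisterVM -> DeleteDatastoreFile_Task flow above."""
    start = time.monotonic()
    session.invoke_api('UnregisterVM', vm_ref)            # hypothetical helper
    task = session.invoke_api('DeleteDatastoreFile_Task', ds_path)
    session.wait_for_task(task)                           # poll to completion
    print(f'Took {time.monotonic() - start:.2f} seconds to destroy '
          'the instance on the hypervisor.')
```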
[ 1239.452311] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae04c5a5-7e5a-48e6-bb03-562721fd26c2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.454183] env[61440]: DEBUG nova.compute.claims [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1239.454360] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.454599] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.477111] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1239.629508] env[61440]: DEBUG oslo_vmware.rw_handles [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1239.688405] env[61440]: DEBUG oslo_vmware.rw_handles [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1239.688526] env[61440]: DEBUG oslo_vmware.rw_handles [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
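[editor's note] The rw_handles records show image bytes streamed to the datastore folder endpoint over a raw HTTP connection, and the earlier WARNING shows the http.client.RemoteDisconnected that surfaces when the server closes the connection before getresponse(). A stdlib-only sketch of that write-handle shape; host, path, and payload are illustrative placeholders, not the real transfer:

```python
import http.client

# Illustrative endpoint; the real URL is the datastore /folder path above.
conn = http.client.HTTPSConnection('esx.example.test', 443)
conn.putrequest('PUT', '/folder/vmware_temp/image.vmdk?dsName=datastore2')
data = b'\x00' * 1024  # placeholder for the image iterator's chunks
conn.putheader('Content-Length', str(len(data)))
conn.endheaders()
conn.send(data)
try:
    resp = conn.getresponse()  # where RemoteDisconnected surfaced above
    resp.read()
except http.client.RemoteDisconnected:
    # The log treats this as a WARNING on close; the upload itself may
    # already have completed, as the "Downloaded image file data" shows.
    pass
finally:
    conn.close()
```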
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1239.796340] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02ed6dc-7ef8-4a7c-bbca-62830ab35588 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.804106] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3f63c4-d9cd-4051-959c-88d7eb4377de {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.833517] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc73479-7837-4678-96a9-82d85002dbea {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.840393] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973de5e9-7062-4aad-834d-fd858e41912d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.852907] env[61440]: DEBUG nova.compute.provider_tree [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.861399] env[61440]: DEBUG nova.scheduler.client.report [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1239.876516] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.422s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.877011] env[61440]: ERROR nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1239.877011] env[61440]: Faults: ['InvalidArgument'] [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] Traceback (most recent call last): [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/compute/manager.py", line 
2633, in _build_and_run_instance [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self.driver.spawn(context, instance, image_meta, [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self._fetch_image_if_missing(context, vi) [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] image_cache(vi, tmp_image_ds_loc) [ 1239.877011] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] vm_util.copy_virtual_disk( [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] session._wait_for_task(vmdk_copy_task) [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] return self.wait_for_task(task_ref) [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] return evt.wait() [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] result = hub.switch() [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] return self.greenlet.switch() [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1239.877290] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] self.f(*self.args, **self.kw) [ 1239.877535] env[61440]: ERROR nova.compute.manager [instance: 
f152a563-2988-4fac-9974-af25e17f14d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1239.877535] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] raise exceptions.translate_fault(task_info.error) [ 1239.877535] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1239.877535] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] Faults: ['InvalidArgument'] [ 1239.877535] env[61440]: ERROR nova.compute.manager [instance: f152a563-2988-4fac-9974-af25e17f14d1] [ 1239.877788] env[61440]: DEBUG nova.compute.utils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1239.879737] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Build of instance f152a563-2988-4fac-9974-af25e17f14d1 was re-scheduled: A specified parameter was not correct: fileType [ 1239.879737] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1239.880139] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1239.880317] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1239.880526] env[61440]: DEBUG nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1239.880725] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1240.469830] env[61440]: DEBUG nova.network.neutron [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.482292] env[61440]: INFO nova.compute.manager [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Took 0.60 seconds to deallocate network for instance. [ 1240.573604] env[61440]: INFO nova.scheduler.client.report [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Deleted allocations for instance f152a563-2988-4fac-9974-af25e17f14d1 [ 1240.608840] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a962be6c-b161-44f2-b881-71dbe041218d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "f152a563-2988-4fac-9974-af25e17f14d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 672.550s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.610285] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "f152a563-2988-4fac-9974-af25e17f14d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 474.771s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.610368] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Acquiring lock "f152a563-2988-4fac-9974-af25e17f14d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.610523] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] 
Lock "f152a563-2988-4fac-9974-af25e17f14d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.610703] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "f152a563-2988-4fac-9974-af25e17f14d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.613174] env[61440]: INFO nova.compute.manager [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Terminating instance [ 1240.615303] env[61440]: DEBUG nova.compute.manager [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1240.615303] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1240.615933] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d12cdb15-5c75-464c-8d5a-b9816b9df85e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.626463] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b4f920-a5da-4043-8906-5d75e8fdbc83 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.637909] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: d5e3b4fc-b970-4162-a8af-e40ed91f4575] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.659826] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f152a563-2988-4fac-9974-af25e17f14d1 could not be found. 
[ 1240.660043] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1240.660229] env[61440]: INFO nova.compute.manager [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1240.660478] env[61440]: DEBUG oslo.service.loopingcall [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1240.660735] env[61440]: DEBUG nova.compute.manager [-] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1240.660839] env[61440]: DEBUG nova.network.neutron [-] [instance: f152a563-2988-4fac-9974-af25e17f14d1] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1240.686035] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: d5e3b4fc-b970-4162-a8af-e40ed91f4575] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1240.690379] env[61440]: DEBUG nova.network.neutron [-] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.701845] env[61440]: INFO nova.compute.manager [-] [instance: f152a563-2988-4fac-9974-af25e17f14d1] Took 0.04 seconds to deallocate network for instance. [ 1240.711271] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "d5e3b4fc-b970-4162-a8af-e40ed91f4575" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.297s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.721880] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: b3054adf-d5b8-4c79-8ae3-ffb4deb745b3] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.758911] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: b3054adf-d5b8-4c79-8ae3-ffb4deb745b3] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1240.782207] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "b3054adf-d5b8-4c79-8ae3-ffb4deb745b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.326s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.792739] env[61440]: DEBUG nova.compute.manager [None req-0565004a-e8b7-4419-9ab6-908c95073f77 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] [instance: 5d96681c-395d-4a15-a699-cf30a4d69827] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.822910] env[61440]: DEBUG nova.compute.manager [None req-0565004a-e8b7-4419-9ab6-908c95073f77 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] [instance: 5d96681c-395d-4a15-a699-cf30a4d69827] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1240.827328] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a58df0f4-4ad1-4ac7-acca-b6f7f763958d tempest-ServerAddressesNegativeTestJSON-425270418 tempest-ServerAddressesNegativeTestJSON-425270418-project-member] Lock "f152a563-2988-4fac-9974-af25e17f14d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.217s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.829390] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "f152a563-2988-4fac-9974-af25e17f14d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 95.201s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.829697] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f152a563-2988-4fac-9974-af25e17f14d1] During sync_power_state the instance has a pending task (deleting). Skip. 
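The "pending task (deleting). Skip." record comes from the periodic power-state sync: when an instance has a task in flight, the sync skips it rather than race the operation that owns it. A rough sketch of that guard, assuming a nova.objects-style instance exposing task_state and a virt driver exposing get_info(); the reconcile helper at the end is hypothetical:

def query_driver_power_state_and_sync(context, driver, db_instance):
    if db_instance.task_state is not None:
        # Another code path (here: deletion) owns the instance, so
        # reconciling power state now would race with it; hence
        # "During sync_power_state the instance has a pending task
        # (deleting). Skip."
        return
    vm_power_state = driver.get_info(db_instance).state
    if vm_power_state != db_instance.power_state:
        # Bring the database in line with what the hypervisor reports.
        sync_instance_power_state(context, db_instance, vm_power_state)  # hypothetical helper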
[ 1240.829991] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "f152a563-2988-4fac-9974-af25e17f14d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.845231] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0565004a-e8b7-4419-9ab6-908c95073f77 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] Lock "5d96681c-395d-4a15-a699-cf30a4d69827" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.441s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.854635] env[61440]: DEBUG nova.compute.manager [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: 08e64faa-b841-4ee2-9fe7-a74a5b100b99] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.880025] env[61440]: DEBUG nova.compute.manager [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: 08e64faa-b841-4ee2-9fe7-a74a5b100b99] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1240.903243] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "08e64faa-b841-4ee2-9fe7-a74a5b100b99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.994s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.913978] env[61440]: DEBUG nova.compute.manager [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: 039393b3-b017-41b4-ab38-7675d72101d8] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.936032] env[61440]: DEBUG nova.compute.manager [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] [instance: 039393b3-b017-41b4-ab38-7675d72101d8] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1240.956490] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7246d48-6fc9-4455-977d-16db428c60d5 tempest-MultipleCreateTestJSON-655015650 tempest-MultipleCreateTestJSON-655015650-project-member] Lock "039393b3-b017-41b4-ab38-7675d72101d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.022s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.966276] env[61440]: DEBUG nova.compute.manager [None req-a9d2f23c-4085-41d1-92c9-2a49665af44b tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 20fcd3fa-cc08-4b89-af9d-5a6241864946] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.989186] env[61440]: DEBUG nova.compute.manager [None req-a9d2f23c-4085-41d1-92c9-2a49665af44b tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 20fcd3fa-cc08-4b89-af9d-5a6241864946] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.009975] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9d2f23c-4085-41d1-92c9-2a49665af44b tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "20fcd3fa-cc08-4b89-af9d-5a6241864946" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.618s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.021270] env[61440]: DEBUG nova.compute.manager [None req-133a0b20-ffc8-48e8-a046-42046e7b99cd tempest-InstanceActionsV221TestJSON-2096217336 tempest-InstanceActionsV221TestJSON-2096217336-project-member] [instance: dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.057377] env[61440]: DEBUG nova.compute.manager [None req-133a0b20-ffc8-48e8-a046-42046e7b99cd tempest-InstanceActionsV221TestJSON-2096217336 tempest-InstanceActionsV221TestJSON-2096217336-project-member] [instance: dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.079633] env[61440]: DEBUG oslo_concurrency.lockutils [None req-133a0b20-ffc8-48e8-a046-42046e7b99cd tempest-InstanceActionsV221TestJSON-2096217336 tempest-InstanceActionsV221TestJSON-2096217336-project-member] Lock "dc5833e5-5db8-4eee-a05b-bc1b1b3ebe05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.358s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.088664] env[61440]: DEBUG nova.compute.manager [None req-c06cdf45-2158-4786-9319-66a32cfccec2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: a925189b-1de9-4c1c-bdec-b10db97b85d4] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.111014] env[61440]: DEBUG nova.compute.manager [None req-c06cdf45-2158-4786-9319-66a32cfccec2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: a925189b-1de9-4c1c-bdec-b10db97b85d4] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.130650] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c06cdf45-2158-4786-9319-66a32cfccec2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "a925189b-1de9-4c1c-bdec-b10db97b85d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.115s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.139950] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.202777] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.203047] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.204484] env[61440]: INFO nova.compute.claims [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1241.273686] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4f12f07-5bb9-4473-a432-5f584e5509cb tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Acquiring lock "41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.273927] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4f12f07-5bb9-4473-a432-5f584e5509cb tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.498529] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-011c4cce-943c-45f6-9858-da60217f3e16 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.505931] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7711242b-2412-4b01-a173-2d5e60fc3d7b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.535236] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa883b7-778b-4992-aae7-d500e54fdd68 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.543017] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86968a7b-be66-418d-89a1-60232ea45c37 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.555225] env[61440]: DEBUG nova.compute.provider_tree [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.565242] env[61440]: DEBUG nova.scheduler.client.report [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1241.581660] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.581777] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Start building networks asynchronously for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1241.626282] env[61440]: DEBUG nova.compute.utils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1241.631104] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1241.631104] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1241.638943] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1241.699952] env[61440]: DEBUG nova.policy [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff2de6f588c44fb4872a92c05525d417', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '804c9ed93ef742e18267003d212bd298', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1241.720548] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1241.745898] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1241.746189] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1241.746378] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.746570] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1241.746721] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.746869] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1241.747091] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1241.747261] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1241.747429] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1241.747590] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1241.747780] env[61440]: DEBUG nova.virt.hardware [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1241.748675] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c474c706-4a08-4322-889b-dd4fe252fbdf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.756898] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29845369-6516-4e48-9eab-5340277d541a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.044521] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Successfully created port: fa29d45b-218e-4a37-b423-ddf5556652a9 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1242.743406] env[61440]: DEBUG nova.compute.manager [req-f5ec67e8-5fe1-4896-b2a8-2008e9037006 req-49dd4f02-319f-46d4-9f0c-3a5f20ac1db2 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Received event network-vif-plugged-fa29d45b-218e-4a37-b423-ddf5556652a9 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1242.743670] env[61440]: DEBUG oslo_concurrency.lockutils [req-f5ec67e8-5fe1-4896-b2a8-2008e9037006 req-49dd4f02-319f-46d4-9f0c-3a5f20ac1db2 service nova] Acquiring lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.743837] env[61440]: DEBUG oslo_concurrency.lockutils [req-f5ec67e8-5fe1-4896-b2a8-2008e9037006 req-49dd4f02-319f-46d4-9f0c-3a5f20ac1db2 service nova] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.744019] env[61440]: DEBUG oslo_concurrency.lockutils [req-f5ec67e8-5fe1-4896-b2a8-2008e9037006 req-49dd4f02-319f-46d4-9f0c-3a5f20ac1db2 service nova] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.744201] env[61440]: DEBUG nova.compute.manager [req-f5ec67e8-5fe1-4896-b2a8-2008e9037006 req-49dd4f02-319f-46d4-9f0c-3a5f20ac1db2 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] No waiting events found dispatching network-vif-plugged-fa29d45b-218e-4a37-b423-ddf5556652a9 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1242.744368] env[61440]: WARNING nova.compute.manager [req-f5ec67e8-5fe1-4896-b2a8-2008e9037006 req-49dd4f02-319f-46d4-9f0c-3a5f20ac1db2 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Received unexpected event network-vif-plugged-fa29d45b-218e-4a37-b423-ddf5556652a9 for instance with vm_state building and task_state spawning. [ 1242.860461] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Successfully updated port: fa29d45b-218e-4a37-b423-ddf5556652a9 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1242.873805] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "refresh_cache-e607fbab-cf85-46c0-81a8-5397fc3b9d2d" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.873805] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquired lock "refresh_cache-e607fbab-cf85-46c0-81a8-5397fc3b9d2d" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.873805] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1242.918482] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1243.092832] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Updating instance_info_cache with network_info: [{"id": "fa29d45b-218e-4a37-b423-ddf5556652a9", "address": "fa:16:3e:27:b8:c6", "network": {"id": "7c8555c5-4e25-4e73-800d-762a949c0add", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1786087589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "804c9ed93ef742e18267003d212bd298", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa29d45b-21", "ovs_interfaceid": "fa29d45b-218e-4a37-b423-ddf5556652a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.104194] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Releasing lock "refresh_cache-e607fbab-cf85-46c0-81a8-5397fc3b9d2d" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.104479] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance network_info: |[{"id": "fa29d45b-218e-4a37-b423-ddf5556652a9", "address": "fa:16:3e:27:b8:c6", "network": {"id": "7c8555c5-4e25-4e73-800d-762a949c0add", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1786087589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "804c9ed93ef742e18267003d212bd298", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa29d45b-21", "ovs_interfaceid": "fa29d45b-218e-4a37-b423-ddf5556652a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1243.104872] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:b8:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa29d45b-218e-4a37-b423-ddf5556652a9', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.112253] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Creating folder: Project (804c9ed93ef742e18267003d212bd298). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1243.112747] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f97461e-9ed2-44b9-9ce5-0b3b83575042 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.126576] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Created folder: Project (804c9ed93ef742e18267003d212bd298) in parent group-v843372. [ 1243.126761] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Creating folder: Instances. Parent ref: group-v843446. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1243.126990] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca28cb4f-a679-4ca6-b5c3-0e91cbdd1b4e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.138145] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Created folder: Instances in parent group-v843446. [ 1243.138401] env[61440]: DEBUG oslo.service.loopingcall [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1243.138601] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1243.138811] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c6185b3-728c-4609-b3d3-f8e1843bbc7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.158679] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.158679] env[61440]: value = "task-4281327" [ 1243.158679] env[61440]: _type = "Task" [ 1243.158679] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.166285] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281327, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.669977] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281327, 'name': CreateVM_Task, 'duration_secs': 0.310754} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.670173] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1243.670873] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.671046] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.671435] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1243.673054] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b82f92-64c2-48f2-af5d-166ddc3e0899 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.676334] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Waiting for the task: (returnval){ [ 1243.676334] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52543c56-140a-9c24-6f10-f56581d84bde" [ 1243.676334] env[61440]: _type = "Task" 
[ 1243.676334] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.683981] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52543c56-140a-9c24-6f10-f56581d84bde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.187792] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.188155] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.188312] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.803656] env[61440]: DEBUG nova.compute.manager [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Received event network-changed-fa29d45b-218e-4a37-b423-ddf5556652a9 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1244.803862] env[61440]: DEBUG nova.compute.manager [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Refreshing instance network info cache due to event network-changed-fa29d45b-218e-4a37-b423-ddf5556652a9. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1244.804103] env[61440]: DEBUG oslo_concurrency.lockutils [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] Acquiring lock "refresh_cache-e607fbab-cf85-46c0-81a8-5397fc3b9d2d" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.804405] env[61440]: DEBUG oslo_concurrency.lockutils [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] Acquired lock "refresh_cache-e607fbab-cf85-46c0-81a8-5397fc3b9d2d" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.804644] env[61440]: DEBUG nova.network.neutron [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Refreshing network info cache for port fa29d45b-218e-4a37-b423-ddf5556652a9 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1245.149051] env[61440]: DEBUG nova.network.neutron [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Updated VIF entry in instance network info cache for port fa29d45b-218e-4a37-b423-ddf5556652a9. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1245.149443] env[61440]: DEBUG nova.network.neutron [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Updating instance_info_cache with network_info: [{"id": "fa29d45b-218e-4a37-b423-ddf5556652a9", "address": "fa:16:3e:27:b8:c6", "network": {"id": "7c8555c5-4e25-4e73-800d-762a949c0add", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1786087589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "804c9ed93ef742e18267003d212bd298", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa29d45b-21", "ovs_interfaceid": "fa29d45b-218e-4a37-b423-ddf5556652a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.159844] env[61440]: DEBUG oslo_concurrency.lockutils [req-42a62225-595f-4d5f-802e-298a546fb739 req-68426499-de76-4042-aee2-3fa1dd2ab620 service nova] Releasing lock "refresh_cache-e607fbab-cf85-46c0-81a8-5397fc3b9d2d" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.808876] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 
tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.526759] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.527071] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.863069] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.863446] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.863634] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1281.274726] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.275239] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.275502] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1283.275538] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1283.298853] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299018] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299152] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299280] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299405] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299527] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299644] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299761] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299874] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.299991] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1283.300124] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
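
The "Running periodic task ComputeManager._*" records above and below all originate from oslo.service: methods on the compute manager are decorated with @periodic_task.periodic_task, collected by the PeriodicTasks base class, and dispatched from a timer, which is why a single long-lived request id (req-711ca2c0-9694-4d1d-aed3-727b6c8263cf) recurs across them. A minimal sketch of that wiring, with the manager class and the task body as illustrative placeholders:

    # Sketch of the oslo.service periodic-task wiring behind the
    # "Running periodic task ComputeManager._*" records; the manager
    # class and the task body are placeholders, not nova's code.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Instances still in the Building state are skipped, as in
            # the "Skipping network cache update" records above.
            pass

    mgr = ComputeManagerSketch()
    mgr.run_periodic_tasks(context=None)  # normally fired from a timer loop

The spacing and the task's real behaviour differ in nova's ComputeManager; the sketch only shows the registration/dispatch shape.
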
[ 1283.300609] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.274179] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.269669] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.275108] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.275108] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.286396] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.286631] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.286805] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.287066] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1288.288349] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de104270-0e71-45ed-8437-eea5f8ce82ec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.297394] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21318483-4203-4c71-875d-47b6d8b6846e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.312721] env[61440]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e648af11-b212-439d-8382-87eef6f4da5f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.319042] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c5dcef-8185-44f9-81b6-f0bc3a913f82 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.351392] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180677MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1288.351549] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.351743] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.428203] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.428384] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 23b7562f-035c-487f-a1f2-279b69ca4355 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.428511] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.428633] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.428752] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.428870] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.428987] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.429119] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.429239] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.429351] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.444615] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.455777] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.466641] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 0244475d-98ff-4801-a648-6728f85171ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.476260] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.486556] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9349b760-746f-40fa-998d-fdcb325431b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.496195] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 97f08952-2a93-4e0c-9e46-31fc421a0291 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.505877] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e0872184-53cf-46b9-826d-f48c83506911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.516219] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 607c5b5e-84b8-458d-a430-5171095922f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.525322] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.535659] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.535913] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1288.536080] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1288.750305] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7ad59c-60f4-4980-ad96-45205f358038 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.758863] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2400d9e5-d9b9-4e66-8364-ecdae3876627 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.787932] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1c50fb-4b29-4aee-abfe-c5fec573f6d9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.795286] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4be856-d463-4d4a-8641-f07dce32ba63 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.808054] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.817556] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1288.831843] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1288.832035] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.480s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
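
The update_available_resource pass that just released "compute_resources" reconciles three views: the hypervisor numbers, the ten active 1 VCPU / 128 MB / 1 GB instance allocations, and the placement inventory. Placement capacity is (total - reserved) * allocation_ratio, and the used_ram figure of 1792 MB is the ten 128 MB instances plus the 512 MB reserved for the host. A worked check using only values taken from the records above:

    # Worked check of the inventory reported above; every number comes
    # from the "Inventory has not changed" and "Final resource view"
    # records in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    used = {'VCPU': 10, 'MEMORY_MB': 1792, 'DISK_GB': 10}

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g} used={used[rc]}")
    # VCPU: capacity=192 used=10
    # MEMORY_MB: capacity=196078 used=1792
    # DISK_GB: capacity=329 used=10
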
[ 1289.160674] env[61440]: WARNING oslo_vmware.rw_handles [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1289.160674] env[61440]: ERROR oslo_vmware.rw_handles [ 1289.161234] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1289.163038] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1289.163320] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Copying Virtual Disk [datastore2] vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/2bdeace4-afbf-48cf-9798-5c6335610755/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1289.163615] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d184252-458d-46b7-ad12-b3b1f94689b4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.171099] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Waiting for the task: (returnval){ [ 1289.171099] env[61440]: value = "task-4281328" [ 1289.171099] env[61440]: _type = "Task" [ 1289.171099] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
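
The WARNING and traceback above come from the image-transfer write handle: oslo_vmware.rw_handles streams the VMDK to the host over a raw http.client connection and calls getresponse() on close, and here the host had already dropped the connection, so http.client raised RemoteDisconnected. The handler logs it and carries on, since the payload was already fully written, as the "Downloaded image file data" record that follows confirms. A self-contained sketch of that failure mode; the host, path and payload are placeholders:

    # Sketch of the close() path in oslo_vmware/rw_handles.py (line 283
    # above) that raised RemoteDisconnected. Host, path and payload are
    # placeholders; "log and continue" mirrors the WARNING handling.
    import http.client

    data = b'\x00' * 1024  # placeholder VMDK bytes

    conn = http.client.HTTPSConnection('esx.example.com', 443)
    conn.putrequest('PUT', '/folder/tmp-sparse.vmdk?dsName=datastore2')
    conn.putheader('Content-Length', str(len(data)))
    conn.endheaders()
    conn.send(data)
    try:
        conn.getresponse()  # server may close without sending a response
    except http.client.RemoteDisconnected as exc:
        # Non-fatal here: the bytes were already on the wire.
        print(f'Error occurred while reading the HTTP response.: {exc}')
    finally:
        conn.close()
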
[ 1289.178971] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Task: {'id': task-4281328, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.681493] env[61440]: DEBUG oslo_vmware.exceptions [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1289.681812] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.682404] env[61440]: ERROR nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1289.682404] env[61440]: Faults: ['InvalidArgument'] [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Traceback (most recent call last): [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] yield resources [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self.driver.spawn(context, instance, image_meta, [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self._fetch_image_if_missing(context, vi) [ 1289.682404] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] image_cache(vi, tmp_image_ds_loc) [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance:
63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] vm_util.copy_virtual_disk( [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] session._wait_for_task(vmdk_copy_task) [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] return self.wait_for_task(task_ref) [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] return evt.wait() [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] result = hub.switch() [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1289.682749] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] return self.greenlet.switch() [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self.f(*self.args, **self.kw) [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] raise exceptions.translate_fault(task_info.error) [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Faults: ['InvalidArgument'] [ 1289.683117] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] [ 1289.683117] env[61440]: INFO nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Terminating instance [ 1289.684346] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.684555] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1289.684796] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-349b50f4-0e02-46e2-b273-cd00e969c93c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.687034] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1289.687244] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1289.687981] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca7bc47-751b-4c83-b33a-bcf4d53d6064 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.694888] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1289.695141] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37764f16-53d1-4cd7-9793-2e270184b3de {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.697309] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1289.697486] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
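
The mkdir, copy and delete records in this stretch all operate on datastore-relative paths of the form "[datastore2] some/path". oslo.vmware ships a small helper for composing and parsing these strings; a minimal sketch, assuming DatastorePath keeps the constructor, parse() and attribute behaviour shown here:

    # Sketch: building/parsing the "[datastore] rel/path" strings used
    # by the ds_util calls above. Assumes oslo.vmware's DatastorePath
    # behaves as shown; verify against the installed version.
    from oslo_vmware.objects.datastore import DatastorePath

    cache = DatastorePath('datastore2', 'devstack-image-cache_base')
    print(str(cache))      # [datastore2] devstack-image-cache_base

    vmdk = DatastorePath.parse(
        '[datastore2] devstack-image-cache_base/'
        '5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/'
        '5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk')
    print(vmdk.datastore)  # datastore2
    print(vmdk.basename)   # 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk
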
[ 1289.698431] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c60c57e2-ca28-4375-8b32-c2d896de49c8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.703227] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Waiting for the task: (returnval){ [ 1289.703227] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52108772-2547-4974-aec2-19c199c49cfa" [ 1289.703227] env[61440]: _type = "Task" [ 1289.703227] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.711628] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52108772-2547-4974-aec2-19c199c49cfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.771340] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1289.771556] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1289.771737] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Deleting the datastore file [datastore2] 63fdeef4-93e6-408c-9b37-33bf3532a6e8 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.772046] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae13aaf6-6b8d-4567-acad-a9f77bf8317d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.779171] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Waiting for the task: (returnval){ [ 1289.779171] env[61440]: value = "task-4281330" [ 1289.779171] env[61440]: _type = "Task" [ 1289.779171] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.787195] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Task: {'id': task-4281330, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.213777] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1290.214202] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Creating directory with path [datastore2] vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1290.214553] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce5ed3fe-da24-4639-ba81-feeb9d120034 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.226037] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Created directory with path [datastore2] vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1290.226257] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Fetch image to [datastore2] vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1290.226471] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1290.227691] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b83838f-b29e-48e4-a02d-a722bb6ce1b7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.234466] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f37cc6-0e7c-49f0-8fb5-df493d52987e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.243982] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4fe164-0f71-4c2f-b98d-a935de7bbd1d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.274751] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-277a850c-a2eb-4739-8943-ca84daf951e9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.283174] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1fb4a882-3dfe-4531-9f56-32d8bbda1c87 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.289350] env[61440]: DEBUG oslo_vmware.api [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Task: {'id': task-4281330, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065758} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.289601] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1290.289755] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1290.289933] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1290.290126] env[61440]: INFO nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Took 0.60 seconds to destroy the instance on the hypervisor. 
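
The UnregisterVM / DeleteDatastoreFile_Task sequence that just finished follows the invoke-then-poll pattern that recurs throughout this log: a *_Task method returns a Task reference immediately, and wait_for_task blocks while _poll_task emits the "progress is N%" records. A minimal sketch of the same pattern; the vCenter host, credentials and managed-object references are placeholders, and the session constructor follows oslo.vmware's documented example:

    # Sketch of the invoke-then-poll pattern behind the *_Task records
    # above. Host, credentials, file_manager_ref and datacenter_ref are
    # placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.example.com', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # A *_Task method returns a Task managed-object reference at once...
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager_ref,
        name='[datastore2] 63fdeef4-93e6-408c-9b37-33bf3532a6e8',
        datacenter=datacenter_ref)
    # ...and wait_for_task polls its TaskInfo until a terminal state,
    # raising a translated exception (e.g. VimFaultException) on error.
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success'
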
[ 1290.292257] env[61440]: DEBUG nova.compute.claims [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1290.292429] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.292638] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.306029] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1290.461088] env[61440]: DEBUG oslo_vmware.rw_handles [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1290.522723] env[61440]: DEBUG oslo_vmware.rw_handles [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1290.522723] env[61440]: DEBUG oslo_vmware.rw_handles [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1290.635046] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5803519d-6517-426c-9d67-5f57137778e4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.643028] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df4c628-6d2c-4b52-b9c9-f316911efa72 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.674852] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3c32a5-74e6-4d56-95c9-1f73b3d129d9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.682590] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b25d02-e602-4b10-8197-745266b765a9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.696926] env[61440]: DEBUG nova.compute.provider_tree [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.706540] env[61440]: DEBUG nova.scheduler.client.report [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1290.720459] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.428s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.720982] env[61440]: ERROR nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1290.720982] env[61440]: Faults: ['InvalidArgument'] [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Traceback (most recent call last): [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1290.720982] env[61440]: 
ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self.driver.spawn(context, instance, image_meta, [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self._fetch_image_if_missing(context, vi) [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] image_cache(vi, tmp_image_ds_loc) [ 1290.720982] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] vm_util.copy_virtual_disk( [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] session._wait_for_task(vmdk_copy_task) [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] return self.wait_for_task(task_ref) [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] return evt.wait() [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] result = hub.switch() [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] return self.greenlet.switch() [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1290.721304] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] self.f(*self.args, **self.kw) [ 1290.721586] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1290.721586] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] raise exceptions.translate_fault(task_info.error) [ 1290.721586] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1290.721586] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Faults: ['InvalidArgument'] [ 1290.721586] env[61440]: ERROR nova.compute.manager [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] [ 1290.721707] env[61440]: DEBUG nova.compute.utils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1290.723214] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Build of instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 was re-scheduled: A specified parameter was not correct: fileType [ 1290.723214] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1290.723594] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1290.723771] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1290.723934] env[61440]: DEBUG nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1290.724114] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1291.043546] env[61440]: DEBUG nova.network.neutron [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.056936] env[61440]: INFO nova.compute.manager [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Took 0.33 seconds to deallocate network for instance. [ 1291.150012] env[61440]: INFO nova.scheduler.client.report [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Deleted allocations for instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 [ 1291.170360] env[61440]: DEBUG oslo_concurrency.lockutils [None req-65ffa98e-b370-4064-98a4-95d64f4d6e28 tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 691.256s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.171583] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 493.060s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.171820] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.172057] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.172257] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.174565] env[61440]: INFO nova.compute.manager [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Terminating instance [ 1291.176873] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquiring lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.177084] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Acquired lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.177281] env[61440]: DEBUG nova.network.neutron [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1291.186891] env[61440]: DEBUG nova.compute.manager [None req-e6e015cf-eca5-42e4-9c87-140d5028c534 tempest-AttachInterfacesUnderV243Test-186038686 tempest-AttachInterfacesUnderV243Test-186038686-project-member] [instance: 047b54df-a55e-4e18-87f7-835466d9581e] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1291.206220] env[61440]: DEBUG nova.network.neutron [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1291.218533] env[61440]: DEBUG nova.compute.manager [None req-e6e015cf-eca5-42e4-9c87-140d5028c534 tempest-AttachInterfacesUnderV243Test-186038686 tempest-AttachInterfacesUnderV243Test-186038686-project-member] [instance: 047b54df-a55e-4e18-87f7-835466d9581e] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1291.242211] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e6e015cf-eca5-42e4-9c87-140d5028c534 tempest-AttachInterfacesUnderV243Test-186038686 tempest-AttachInterfacesUnderV243Test-186038686-project-member] Lock "047b54df-a55e-4e18-87f7-835466d9581e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.873s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.251672] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1291.303974] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.303974] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.303974] env[61440]: INFO nova.compute.claims [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.585259] env[61440]: DEBUG nova.network.neutron [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.594660] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Releasing lock "refresh_cache-63fdeef4-93e6-408c-9b37-33bf3532a6e8" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.595945] env[61440]: DEBUG nova.compute.manager [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1291.595945] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1291.595945] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2312e07b-febd-4d3a-8dee-47dbd2489f40 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.607946] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef0d874-b23e-466e-9b80-86e7743d69ba {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.641151] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63fdeef4-93e6-408c-9b37-33bf3532a6e8 could not be found. [ 1291.641151] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1291.641320] env[61440]: INFO nova.compute.manager [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1291.641615] env[61440]: DEBUG oslo.service.loopingcall [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1291.642721] env[61440]: DEBUG nova.compute.manager [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1291.642838] env[61440]: DEBUG nova.network.neutron [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1291.645070] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c3ba7d-21b2-499f-a94e-64d0eb4cf1c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.651957] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bc3f25-b1d1-4b82-8132-0ec566e511a2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.683398] env[61440]: DEBUG nova.network.neutron [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1291.685076] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cda3b3a-389f-4e57-a8a4-0541c8f18add {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.692829] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe580a5-4727-4cbb-b2ba-074b8723eb66 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.696727] env[61440]: DEBUG nova.network.neutron [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.708163] env[61440]: DEBUG nova.compute.provider_tree [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.709371] env[61440]: INFO nova.compute.manager [-] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] Took 0.07 seconds to deallocate network for instance. 
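The claim/abort cycle above is bounded by the inventory the resource tracker reports for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa. As a minimal sketch (not Nova's or Placement's actual code; the helper name is invented), schedulable capacity per resource class follows the standard Placement formula (total - reserved) * allocation_ratio, which for the figures logged above works out to 192 VCPU, 196078 MB of RAM and 329 GB of disk:

    # Figures copied verbatim from the inventory entries logged above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 329, "reserved": 0, "allocation_ratio": 1.0},
    }

    def effective_capacity(inv):
        # Hypothetical helper: capacity = (total - reserved) * allocation_ratio.
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 329.0}

This is why the "Inventory has not changed" lines repeat around every claim and abort: the tracker re-submits the same totals and Placement only rewrites them if something differs.
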
[ 1291.717045] env[61440]: DEBUG nova.scheduler.client.report [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1291.734248] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.735029] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1291.776499] env[61440]: DEBUG nova.compute.utils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1291.778562] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1291.778738] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1291.787254] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1291.835020] env[61440]: DEBUG oslo_concurrency.lockutils [None req-617aaab6-fe0e-4aff-8212-f614756a26ad tempest-ServerMetadataTestJSON-1547182647 tempest-ServerMetadataTestJSON-1547182647-project-member] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.663s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.835933] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 146.207s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.836026] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 63fdeef4-93e6-408c-9b37-33bf3532a6e8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1291.836962] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "63fdeef4-93e6-408c-9b37-33bf3532a6e8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.851066] env[61440]: DEBUG nova.policy [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6730b00fdc9d428e8c290ff995c990bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'caf86bc289974963bc0dea063f2b7892', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1291.860203] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1291.885170] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1291.885351] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1291.885476] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1291.885651] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1291.885802] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1291.885953] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1291.886182] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1291.886367] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1291.886538] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 
tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1291.886703] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1291.886878] env[61440]: DEBUG nova.virt.hardware [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1291.887747] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a938c7-7e3c-4039-af38-bfc318e53894 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.895396] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8effa8f-c279-44e8-975d-79c55d2faae9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.167408] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Successfully created port: a208ebf6-cad3-4c72-ad4b-68f6f72ea933 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1292.878735] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Successfully updated port: a208ebf6-cad3-4c72-ad4b-68f6f72ea933 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1292.890809] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "refresh_cache-269e724a-100e-4112-9c06-8a36871538ac" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.890971] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquired lock "refresh_cache-269e724a-100e-4112-9c06-8a36871538ac" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.891140] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1292.968352] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 
269e724a-100e-4112-9c06-8a36871538ac] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1293.084423] env[61440]: DEBUG nova.compute.manager [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Received event network-vif-plugged-a208ebf6-cad3-4c72-ad4b-68f6f72ea933 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1293.084642] env[61440]: DEBUG oslo_concurrency.lockutils [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] Acquiring lock "269e724a-100e-4112-9c06-8a36871538ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.084849] env[61440]: DEBUG oslo_concurrency.lockutils [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] Lock "269e724a-100e-4112-9c06-8a36871538ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.085221] env[61440]: DEBUG oslo_concurrency.lockutils [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] Lock "269e724a-100e-4112-9c06-8a36871538ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.085424] env[61440]: DEBUG nova.compute.manager [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] No waiting events found dispatching network-vif-plugged-a208ebf6-cad3-4c72-ad4b-68f6f72ea933 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1293.085601] env[61440]: WARNING nova.compute.manager [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Received unexpected event network-vif-plugged-a208ebf6-cad3-4c72-ad4b-68f6f72ea933 for instance with vm_state building and task_state spawning. [ 1293.085773] env[61440]: DEBUG nova.compute.manager [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Received event network-changed-a208ebf6-cad3-4c72-ad4b-68f6f72ea933 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1293.085937] env[61440]: DEBUG nova.compute.manager [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Refreshing instance network info cache due to event network-changed-a208ebf6-cad3-4c72-ad4b-68f6f72ea933. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1293.086123] env[61440]: DEBUG oslo_concurrency.lockutils [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] Acquiring lock "refresh_cache-269e724a-100e-4112-9c06-8a36871538ac" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.155475] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Updating instance_info_cache with network_info: [{"id": "a208ebf6-cad3-4c72-ad4b-68f6f72ea933", "address": "fa:16:3e:ed:f2:71", "network": {"id": "66caa4a7-db0b-48a5-8435-33046511ff89", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-498572970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "caf86bc289974963bc0dea063f2b7892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa208ebf6-ca", "ovs_interfaceid": "a208ebf6-cad3-4c72-ad4b-68f6f72ea933", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.168487] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Releasing lock "refresh_cache-269e724a-100e-4112-9c06-8a36871538ac" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.168773] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Instance network_info: |[{"id": "a208ebf6-cad3-4c72-ad4b-68f6f72ea933", "address": "fa:16:3e:ed:f2:71", "network": {"id": "66caa4a7-db0b-48a5-8435-33046511ff89", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-498572970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "caf86bc289974963bc0dea063f2b7892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapa208ebf6-ca", "ovs_interfaceid": "a208ebf6-cad3-4c72-ad4b-68f6f72ea933", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1293.169075] env[61440]: DEBUG oslo_concurrency.lockutils [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] Acquired lock "refresh_cache-269e724a-100e-4112-9c06-8a36871538ac" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.169264] env[61440]: DEBUG nova.network.neutron [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Refreshing network info cache for port a208ebf6-cad3-4c72-ad4b-68f6f72ea933 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1293.170301] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:f2:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a208ebf6-cad3-4c72-ad4b-68f6f72ea933', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1293.178389] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Creating folder: Project (caf86bc289974963bc0dea063f2b7892). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1293.179289] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35395b14-9a56-4934-b6ba-eb1bc2e6da4b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.192523] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Created folder: Project (caf86bc289974963bc0dea063f2b7892) in parent group-v843372. [ 1293.192523] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Creating folder: Instances. Parent ref: group-v843449. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1293.192739] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-057a0efb-7ef4-4bb6-b1cd-2f0c0a73565f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.201282] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Created folder: Instances in parent group-v843449. 
[ 1293.201501] env[61440]: DEBUG oslo.service.loopingcall [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.201674] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1293.201859] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8fa6af6-08eb-45ae-ad5c-d4e2324e2091 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.224244] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1293.224244] env[61440]: value = "task-4281333" [ 1293.224244] env[61440]: _type = "Task" [ 1293.224244] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.231274] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281333, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.519261] env[61440]: DEBUG nova.network.neutron [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Updated VIF entry in instance network info cache for port a208ebf6-cad3-4c72-ad4b-68f6f72ea933. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1293.519677] env[61440]: DEBUG nova.network.neutron [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Updating instance_info_cache with network_info: [{"id": "a208ebf6-cad3-4c72-ad4b-68f6f72ea933", "address": "fa:16:3e:ed:f2:71", "network": {"id": "66caa4a7-db0b-48a5-8435-33046511ff89", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-498572970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "caf86bc289974963bc0dea063f2b7892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa208ebf6-ca", "ovs_interfaceid": "a208ebf6-cad3-4c72-ad4b-68f6f72ea933", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.529925] env[61440]: DEBUG oslo_concurrency.lockutils [req-d0bf8124-c49a-4668-9223-c4191f2ee6b9 req-8a8ae11a-c64f-4eeb-907f-f030395d6b28 service nova] Releasing lock "refresh_cache-269e724a-100e-4112-9c06-8a36871538ac" {{(pid=61440) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.734554] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281333, 'name': CreateVM_Task, 'duration_secs': 0.296223} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.736024] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1293.736024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.736024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.736247] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1293.736588] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b50f92-9e1a-4eaf-a069-5b99d565fb3a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.742054] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Waiting for the task: (returnval){ [ 1293.742054] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a213bf-0779-1972-ae87-a92291ec2a2c" [ 1293.742054] env[61440]: _type = "Task" [ 1293.742054] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.748677] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a213bf-0779-1972-ae87-a92291ec2a2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.252281] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.252594] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1294.252813] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1298.012835] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "269e724a-100e-4112-9c06-8a36871538ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.672752] env[61440]: DEBUG oslo_concurrency.lockutils [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.733053] env[61440]: WARNING oslo_vmware.rw_handles [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 
1339.733053] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1339.733053] env[61440]: ERROR oslo_vmware.rw_handles [ 1339.733053] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1339.734635] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1339.735067] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Copying Virtual Disk [datastore2] vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/e44b011a-45aa-40ea-9dee-9ce54e0a4268/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1339.735403] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0369f98-8975-4a09-a04e-776bae14f3ac {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.743306] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Waiting for the task: (returnval){ [ 1339.743306] env[61440]: value = "task-4281334" [ 1339.743306] env[61440]: _type = "Task" [ 1339.743306] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.753568] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Task: {'id': task-4281334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.253715] env[61440]: DEBUG oslo_vmware.exceptions [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1340.254125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.254758] env[61440]: ERROR nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1340.254758] env[61440]: Faults: ['InvalidArgument'] [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Traceback (most recent call last): [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] yield resources [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self.driver.spawn(context, instance, image_meta, [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self._fetch_image_if_missing(context, vi) [ 1340.254758] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] image_cache(vi, tmp_image_ds_loc) [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] vm_util.copy_virtual_disk( [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] session._wait_for_task(vmdk_copy_task) [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] return self.wait_for_task(task_ref) [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] return evt.wait() [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] result = hub.switch() [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1340.255140] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] return self.greenlet.switch() [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self.f(*self.args, **self.kw) [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] raise exceptions.translate_fault(task_info.error) [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Faults: ['InvalidArgument'] [ 1340.255447] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] [ 1340.255447] env[61440]: INFO nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Terminating instance [ 1340.256733] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.257902] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.258542] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 
tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1340.258738] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1340.258977] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46e6fe03-abbe-4b2d-89a3-241ee40f88df {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.261141] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fc293d-2d38-4423-b651-a457d6b74e85 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.267750] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1340.267952] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc31b07a-0b9a-4198-9e94-f9422b8d80b4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.269948] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.270141] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1340.271119] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-560ddce0-10d2-4f0b-976e-54a369c4ddde {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.275751] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Waiting for the task: (returnval){ [ 1340.275751] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5227cc01-5b81-af34-fee1-9ce19e3a59a1" [ 1340.275751] env[61440]: _type = "Task" [ 1340.275751] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.282602] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5227cc01-5b81-af34-fee1-9ce19e3a59a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.337292] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1340.337505] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1340.337682] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Deleting the datastore file [datastore2] 23b7562f-035c-487f-a1f2-279b69ca4355 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1340.338034] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24658069-d379-4c3f-be88-7b857c773353 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.344409] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Waiting for the task: (returnval){ [ 1340.344409] env[61440]: value = "task-4281336" [ 1340.344409] env[61440]: _type = "Task" [ 1340.344409] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.351985] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Task: {'id': task-4281336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.786638] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1340.786939] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Creating directory with path [datastore2] vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.787163] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16bec739-d377-47d6-85ee-9bb055e1c05a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.798212] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Created directory with path [datastore2] vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.798364] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Fetch image to [datastore2] vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1340.798544] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1340.799304] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bc12c7-adbc-459b-8c5c-d180c7e433d7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.805951] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bbf036-c1f5-4146-aeab-61edc2655c7f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.814510] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d2cd06-606a-44a0-b71e-527b91ecacec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.844057] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances 
{{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.844998] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83374f3-7ba0-47f8-bf31-7a430cf86256 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.855733] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-af46b01d-e6c2-4f52-834c-9cac0d99c31c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.857409] env[61440]: DEBUG oslo_vmware.api [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Task: {'id': task-4281336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064888} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.857647] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.857853] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1340.858049] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1340.858226] env[61440]: INFO nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1340.860344] env[61440]: DEBUG nova.compute.claims [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1340.860514] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.860735] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.884143] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1341.015255] env[61440]: DEBUG oslo_vmware.rw_handles [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1341.075726] env[61440]: DEBUG oslo_vmware.rw_handles [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1341.075917] env[61440]: DEBUG oslo_vmware.rw_handles [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1341.184431] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4172e15-7bee-4bbe-a4ed-4825a10ad6c5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.193019] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf12f5a-4e7a-4ce0-82be-a63cf8931f43 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.224757] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef306dee-eb09-4b60-8a06-2397fa28de16 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.232430] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d28b494-8f1b-4bd5-aee0-3a9cf55ea0a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.245669] env[61440]: DEBUG nova.compute.provider_tree [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.254632] env[61440]: DEBUG nova.scheduler.client.report [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1341.272132] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.411s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.272727] env[61440]: ERROR nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1341.272727] env[61440]: Faults: ['InvalidArgument'] [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Traceback (most recent call last): [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1341.272727] env[61440]: 
ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self.driver.spawn(context, instance, image_meta, [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self._fetch_image_if_missing(context, vi) [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] image_cache(vi, tmp_image_ds_loc) [ 1341.272727] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] vm_util.copy_virtual_disk( [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] session._wait_for_task(vmdk_copy_task) [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] return self.wait_for_task(task_ref) [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] return evt.wait() [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] result = hub.switch() [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] return self.greenlet.switch() [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1341.273079] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] self.f(*self.args, **self.kw) [ 1341.273409] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1341.273409] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] raise exceptions.translate_fault(task_info.error) [ 1341.273409] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1341.273409] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Faults: ['InvalidArgument'] [ 1341.273409] env[61440]: ERROR nova.compute.manager [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] [ 1341.273528] env[61440]: DEBUG nova.compute.utils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1341.274964] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.275134] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1341.275569] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Build of instance 23b7562f-035c-487f-a1f2-279b69ca4355 was re-scheduled: A specified parameter was not correct: fileType [ 1341.275569] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1341.275959] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1341.276151] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1341.276325] env[61440]: DEBUG nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1341.276492] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1341.648445] env[61440]: DEBUG nova.network.neutron [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.663640] env[61440]: INFO nova.compute.manager [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Took 0.39 seconds to deallocate network for instance. [ 1341.776019] env[61440]: INFO nova.scheduler.client.report [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Deleted allocations for instance 23b7562f-035c-487f-a1f2-279b69ca4355 [ 1341.801735] env[61440]: DEBUG oslo_concurrency.lockutils [None req-568f2300-e88b-41fb-b7a5-155df9360a68 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "23b7562f-035c-487f-a1f2-279b69ca4355" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 688.831s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.803052] env[61440]: DEBUG oslo_concurrency.lockutils [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "23b7562f-035c-487f-a1f2-279b69ca4355" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 493.007s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.803337] env[61440]: DEBUG oslo_concurrency.lockutils [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Acquiring lock "23b7562f-035c-487f-a1f2-279b69ca4355-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.803597] env[61440]: DEBUG oslo_concurrency.lockutils [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "23b7562f-035c-487f-a1f2-279b69ca4355-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.803749] env[61440]: DEBUG oslo_concurrency.lockutils [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "23b7562f-035c-487f-a1f2-279b69ca4355-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.806186] env[61440]: INFO nova.compute.manager [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Terminating instance [ 1341.810022] env[61440]: DEBUG nova.compute.manager [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1341.810022] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1341.810022] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59aee6e8-8114-469f-8eea-4425fce933e9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.819690] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df5d456-8549-4d7d-8feb-2d02d42c9a39 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.831706] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1341.853269] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 23b7562f-035c-487f-a1f2-279b69ca4355 could not be found. [ 1341.853478] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1341.853664] env[61440]: INFO nova.compute.manager [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Took 0.04 seconds to destroy the instance on the hypervisor. 
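The waited/held timings on the lock lines above ("waited 0.000s", "held 688.831s", and so on) come from oslo.concurrency's synchronized wrapper, which measures both the time spent acquiring a named lock and the time the protected body holds it. A rough sketch of that pattern, assuming nothing beyond the Python standard library:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def synchronized(name, caller):
        # One process-wide lock per name, like lock "compute_resources" above.
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    # Usage, e.g.:
    # with synchronized("compute_resources", "ResourceTracker.instance_claim"):
    #     ...claim or abort resources...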
[ 1341.853922] env[61440]: DEBUG oslo.service.loopingcall [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.854429] env[61440]: DEBUG nova.compute.manager [-] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1341.854534] env[61440]: DEBUG nova.network.neutron [-] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1341.900499] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.900743] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.902410] env[61440]: INFO nova.compute.claims [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.910065] env[61440]: DEBUG nova.network.neutron [-] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.922923] env[61440]: INFO nova.compute.manager [-] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] Took 0.07 seconds to deallocate network for instance. [ 1342.034529] env[61440]: DEBUG oslo_concurrency.lockutils [None req-025f0fc8-9472-46cb-a6b4-b299f7d71220 tempest-ImagesOneServerTestJSON-926181390 tempest-ImagesOneServerTestJSON-926181390-project-member] Lock "23b7562f-035c-487f-a1f2-279b69ca4355" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.231s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.035841] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "23b7562f-035c-487f-a1f2-279b69ca4355" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 196.407s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.036139] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 23b7562f-035c-487f-a1f2-279b69ca4355] During sync_power_state the instance has a pending task (deleting). 
Skip. [ 1342.036312] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "23b7562f-035c-487f-a1f2-279b69ca4355" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.210965] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f121c2b6-bf30-40e7-81cc-343e2055a43c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.218913] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0749babb-d80d-4efd-8fae-6fcad4df286a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.250022] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c2d7fa-e1b1-4afd-b125-76cd477704f4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.257564] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550a5df0-9aae-4b9d-b815-cc0759622ec8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.271346] env[61440]: DEBUG nova.compute.provider_tree [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.277329] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1342.282740] env[61440]: DEBUG nova.scheduler.client.report [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1342.298861] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.398s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.299476] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 
tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1342.335614] env[61440]: DEBUG nova.compute.utils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1342.337676] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1342.337940] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1342.348276] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1342.422033] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1342.425641] env[61440]: DEBUG nova.policy [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58d29102bf5c44a592ba08c8fb319606', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2701e3de21f42a7a286b54ec498da89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1342.453463] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1342.453715] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1342.453875] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.454073] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1342.454229] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.454381] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1342.454599] 
env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1342.454786] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1342.454961] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1342.455150] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1342.455324] env[61440]: DEBUG nova.virt.hardware [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1342.456198] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d9f9bb-e4cc-4ada-a9cf-6dbee9b23357 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.464738] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64879b3-e02a-4c40-9ab3-a815c865a5a1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.845775] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Successfully created port: 9d2ec0b9-a431-4124-9029-39bb5bc99409 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1343.919103] env[61440]: DEBUG nova.compute.manager [req-853d08c3-1167-4783-b7d4-a024b685a01c req-1acfab38-ad55-4131-a541-0b59b0f4600c service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Received event network-vif-plugged-9d2ec0b9-a431-4124-9029-39bb5bc99409 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1343.923447] env[61440]: DEBUG oslo_concurrency.lockutils [req-853d08c3-1167-4783-b7d4-a024b685a01c req-1acfab38-ad55-4131-a541-0b59b0f4600c service nova] Acquiring lock "608ac5c2-3518-4da0-992f-a752584165a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.923830] env[61440]: DEBUG oslo_concurrency.lockutils 
[req-853d08c3-1167-4783-b7d4-a024b685a01c req-1acfab38-ad55-4131-a541-0b59b0f4600c service nova] Lock "608ac5c2-3518-4da0-992f-a752584165a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.924170] env[61440]: DEBUG oslo_concurrency.lockutils [req-853d08c3-1167-4783-b7d4-a024b685a01c req-1acfab38-ad55-4131-a541-0b59b0f4600c service nova] Lock "608ac5c2-3518-4da0-992f-a752584165a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.924474] env[61440]: DEBUG nova.compute.manager [req-853d08c3-1167-4783-b7d4-a024b685a01c req-1acfab38-ad55-4131-a541-0b59b0f4600c service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] No waiting events found dispatching network-vif-plugged-9d2ec0b9-a431-4124-9029-39bb5bc99409 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1343.924783] env[61440]: WARNING nova.compute.manager [req-853d08c3-1167-4783-b7d4-a024b685a01c req-1acfab38-ad55-4131-a541-0b59b0f4600c service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Received unexpected event network-vif-plugged-9d2ec0b9-a431-4124-9029-39bb5bc99409 for instance with vm_state building and task_state spawning. [ 1344.046026] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Successfully updated port: 9d2ec0b9-a431-4124-9029-39bb5bc99409 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1344.061765] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "refresh_cache-608ac5c2-3518-4da0-992f-a752584165a7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.061810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired lock "refresh_cache-608ac5c2-3518-4da0-992f-a752584165a7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.061936] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1344.131953] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1344.273888] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1344.274077] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1344.274207] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1344.303721] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.303887] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304035] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304170] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304295] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304416] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304536] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304655] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304771] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.304887] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1344.305014] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1344.395783] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Updating instance_info_cache with network_info: [{"id": "9d2ec0b9-a431-4124-9029-39bb5bc99409", "address": "fa:16:3e:00:19:94", "network": {"id": "fcf6a284-5561-4015-a08c-9ea5ed0cdcaa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1432724510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2701e3de21f42a7a286b54ec498da89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d2ec0b9-a4", "ovs_interfaceid": "9d2ec0b9-a431-4124-9029-39bb5bc99409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.411030] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Releasing lock "refresh_cache-608ac5c2-3518-4da0-992f-a752584165a7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.411030] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance network_info: |[{"id": "9d2ec0b9-a431-4124-9029-39bb5bc99409", "address": "fa:16:3e:00:19:94", "network": {"id": "fcf6a284-5561-4015-a08c-9ea5ed0cdcaa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1432724510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2701e3de21f42a7a286b54ec498da89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d2ec0b9-a4", "ovs_interfaceid": "9d2ec0b9-a431-4124-9029-39bb5bc99409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1344.411876] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:19:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721e64ee-fc02-4eb5-9c8c-ea55647a1b92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d2ec0b9-a431-4124-9029-39bb5bc99409', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.419603] env[61440]: DEBUG oslo.service.loopingcall [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.420104] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1344.420374] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d91e067-0d98-4604-bb23-9e036f57d3e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.441293] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.441293] env[61440]: value = "task-4281337" [ 1344.441293] env[61440]: _type = "Task" [ 1344.441293] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.450177] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281337, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.951643] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281337, 'name': CreateVM_Task, 'duration_secs': 0.303856} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.951918] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1344.952958] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.952958] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.952958] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1344.953241] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-993f4072-317c-4e32-bca2-2b8c6e22ca91 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.958598] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 1344.958598] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f8831d-36a1-d7a2-df1a-b1b6527ea5ec" [ 1344.958598] env[61440]: _type = "Task" [ 1344.958598] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.966582] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f8831d-36a1-d7a2-df1a-b1b6527ea5ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.274611] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.469157] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.469434] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.469655] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.828751] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.829011] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.946484] env[61440]: DEBUG nova.compute.manager [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Received event network-changed-9d2ec0b9-a431-4124-9029-39bb5bc99409 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1345.946687] env[61440]: DEBUG nova.compute.manager [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Refreshing instance network info cache due to event network-changed-9d2ec0b9-a431-4124-9029-39bb5bc99409.
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1345.946856] env[61440]: DEBUG oslo_concurrency.lockutils [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] Acquiring lock "refresh_cache-608ac5c2-3518-4da0-992f-a752584165a7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.946999] env[61440]: DEBUG oslo_concurrency.lockutils [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] Acquired lock "refresh_cache-608ac5c2-3518-4da0-992f-a752584165a7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.947176] env[61440]: DEBUG nova.network.neutron [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Refreshing network info cache for port 9d2ec0b9-a431-4124-9029-39bb5bc99409 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1346.284021] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "608ac5c2-3518-4da0-992f-a752584165a7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.302512] env[61440]: DEBUG nova.network.neutron [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Updated VIF entry in instance network info cache for port 9d2ec0b9-a431-4124-9029-39bb5bc99409.
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1346.303067] env[61440]: DEBUG nova.network.neutron [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Updating instance_info_cache with network_info: [{"id": "9d2ec0b9-a431-4124-9029-39bb5bc99409", "address": "fa:16:3e:00:19:94", "network": {"id": "fcf6a284-5561-4015-a08c-9ea5ed0cdcaa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1432724510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2701e3de21f42a7a286b54ec498da89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d2ec0b9-a4", "ovs_interfaceid": "9d2ec0b9-a431-4124-9029-39bb5bc99409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.312750] env[61440]: DEBUG oslo_concurrency.lockutils [req-917ce473-13c3-4e84-9143-bcffaf9b770b req-58ff6f4f-70a7-4c1d-a071-90e177e1f40d service nova] Releasing lock "refresh_cache-608ac5c2-3518-4da0-992f-a752584165a7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.274597] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.270306] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.270631] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.310025] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.274879] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.289356] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None 
None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.289574] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.289737] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.289892] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1349.291044] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5794d6-7fd4-4d76-ac25-2bba428fa21c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.299730] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24553077-a44c-449f-9215-49d67a9441ab {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.313793] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280648f5-4c8c-484d-b386-1f64ab96a080 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.320300] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6c7336-c314-4c47-a35c-7fa8c08d4ecf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.348464] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180672MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1349.348664] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.348803] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.434301] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 
b8a27ad2-4cc5-4219-9bc3-5735433b153c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.434469] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.434599] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.434723] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.434872] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.434999] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.435137] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.435257] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.435373] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.435480] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.471344] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.483899] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9349b760-746f-40fa-998d-fdcb325431b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.495144] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 97f08952-2a93-4e0c-9e46-31fc421a0291 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.504360] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e0872184-53cf-46b9-826d-f48c83506911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.516157] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 607c5b5e-84b8-458d-a430-5171095922f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.526728] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.536318] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.546645] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.546880] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1349.547038] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1349.790206] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baadff8-2e00-4b75-8829-1ecc3f34bdb4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.797961] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4aebc7b-6935-4901-bbf9-7caa47184bed {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.828054] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962effdc-c580-45f5-8b8b-cadf51efc809 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.835393] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a082de-5134-43cd-82f1-28ea6c70b4c0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.848969] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.858651] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1349.879364] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1349.879554] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.531s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.012857] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "c307f560-e474-441f-b099-53c2fd290488" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.012857] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "c307f560-e474-441f-b099-53c2fd290488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.783849] env[61440]: WARNING oslo_vmware.rw_handles [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1388.783849] env[61440]: ERROR oslo_vmware.rw_handles [ 1388.784454] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312
tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1388.786172] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1388.786413] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Copying Virtual Disk [datastore2] vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/3252e3f3-3132-42c3-9f71-223ff92a6792/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1388.786748] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1cb9a89-2442-44ea-a694-0cb617e7227b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.794814] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Waiting for the task: (returnval){ [ 1388.794814] env[61440]: value = "task-4281338" [ 1388.794814] env[61440]: _type = "Task" [ 1388.794814] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.804908] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Task: {'id': task-4281338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.305316] env[61440]: DEBUG oslo_vmware.exceptions [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1389.305814] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.306511] env[61440]: ERROR nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1389.306511] env[61440]: Faults: ['InvalidArgument'] [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Traceback (most recent call last): [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] yield resources [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self.driver.spawn(context, instance, image_meta, [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self._fetch_image_if_missing(context, vi) [ 1389.306511] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] image_cache(vi, tmp_image_ds_loc) [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] vm_util.copy_virtual_disk( [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] session._wait_for_task(vmdk_copy_task) [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] return self.wait_for_task(task_ref) [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] return evt.wait() [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] result = hub.switch() [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1389.306868] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] return self.greenlet.switch() [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self.f(*self.args, **self.kw) [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] raise exceptions.translate_fault(task_info.error) [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Faults: ['InvalidArgument'] [ 1389.307275] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] [ 1389.309156] env[61440]: INFO nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Terminating instance [ 1389.310095] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.310095] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1389.310242] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d60f33f1-4b48-44d4-8c2e-6145c995e39f {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.312457] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1389.312649] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1389.313382] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b112ee3-af58-49ab-b0b0-17ed89f55eae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.320597] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1389.321571] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3eecef3-c856-44f7-89f2-718e3d9686b8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.322939] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1389.323145] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1389.323786] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03f3c3d7-be80-4d7e-9aa0-abf72acc851e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.328451] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Waiting for the task: (returnval){ [ 1389.328451] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5224b535-014e-3fe5-95d8-031e32f068fe" [ 1389.328451] env[61440]: _type = "Task" [ 1389.328451] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.335414] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5224b535-014e-3fe5-95d8-031e32f068fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.390288] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1389.390390] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1389.390561] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Deleting the datastore file [datastore2] b8a27ad2-4cc5-4219-9bc3-5735433b153c {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1389.390956] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8df0cb7b-318b-4583-9cce-18881b4303a8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.397434] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Waiting for the task: (returnval){ [ 1389.397434] env[61440]: value = "task-4281340" [ 1389.397434] env[61440]: _type = "Task" [ 1389.397434] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.405444] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Task: {'id': task-4281340, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.839164] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1389.839433] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Creating directory with path [datastore2] vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1389.839714] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9693fa64-d32e-46e9-98f9-f0452ea8b155 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.851338] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Created directory with path [datastore2] vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1389.851527] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Fetch image to [datastore2] vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1389.851696] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1389.852407] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c48b0dd-bd00-47df-bd40-b22f8ff0aa30 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.858418] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b8bd70-4811-4179-b421-52b787691deb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.867228] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962fd56d-e516-4c2a-8847-98781bf28196 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.897495] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2e70df6f-b176-49ba-90d0-6d9d5ca2a6c5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.907658] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-455dac42-58f6-4ec5-8f50-198017f9cfc0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.909291] env[61440]: DEBUG oslo_vmware.api [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Task: {'id': task-4281340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090463} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.909516] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1389.909704] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1389.909893] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1389.910085] env[61440]: INFO nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Took 0.60 seconds to destroy the instance on the hypervisor. 
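
[annotation] The task lifecycle visible throughout this log (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task: "Waiting for the task", "progress is 0%", then "completed successfully" with a duration_secs) is the poll-until-terminal pattern that oslo.vmware's wait_for_task/_poll_task implements against vCenter. Below is a minimal, self-contained Python sketch of that polling loop; TaskInfo, poll_task, and the fake state iterator are illustrative stand-ins, not the oslo.vmware API.

import time
from dataclasses import dataclass
from typing import Callable

# Hypothetical stand-in for the vSphere TaskInfo object; the real
# terminal states are "success" and "error" (vim.TaskInfo.State).
@dataclass
class TaskInfo:
    state: str              # "running" | "success" | "error"
    progress: int           # percent complete while running
    error: str | None = None

def poll_task(read_task_info: Callable[[], TaskInfo],
              interval: float = 0.5) -> float:
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is N%' ... 'completed successfully' records above.
    Returns the elapsed time, i.e. the log's duration_secs."""
    start = time.monotonic()
    while True:
        info = read_task_info()
        if info.state == "success":
            return time.monotonic() - start
        if info.state == "error":
            # oslo.vmware translates the server fault and raises, e.g.
            # VimFaultException: A specified parameter was not correct: fileType
            raise RuntimeError(info.error or "task failed")
        time.sleep(interval)   # the real loop reschedules instead of sleeping

# Usage sketch: a fake task that reaches "success" on the third poll.
_states = iter([TaskInfo("running", 0), TaskInfo("running", 40),
                TaskInfo("success", 100)])
duration = poll_task(lambda: next(_states), interval=0.01)
print(f"completed successfully. duration_secs={duration:.6f}")

In the service itself the poll is driven by a looping call on a green thread rather than time.sleep (the CopyVirtualDisk_Task failure traceback earlier in this log goes through oslo_vmware/common/loopingcall.py), which is why other periodic tasks interleave between successive "progress is N%" records.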
[ 1389.912160] env[61440]: DEBUG nova.compute.claims [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1389.912362] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1389.912574] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1389.929862] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1390.089858] env[61440]: DEBUG oslo_vmware.rw_handles [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1390.150215] env[61440]: DEBUG oslo_vmware.rw_handles [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1390.150428] env[61440]: DEBUG oslo_vmware.rw_handles [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1390.228432] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce6d204-fc87-454a-ade8-3f07ab215856 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1390.236131] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bb6032-48b8-46cc-8f54-7890c5ddb2fc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1390.265144] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3228d09d-8a93-4c73-a1bd-c3e91d620901 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1390.272163] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a678cc-2b49-4386-b8ef-d9f40a662fda {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1390.286107] env[61440]: DEBUG nova.compute.provider_tree [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1390.295436] env[61440]: DEBUG nova.scheduler.client.report [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1390.314027] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.401s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1390.314586] env[61440]: ERROR nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1390.314586] env[61440]: Faults: ['InvalidArgument']
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Traceback (most recent call last):
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self.driver.spawn(context, instance, image_meta,
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self._fetch_image_if_missing(context, vi)
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] image_cache(vi, tmp_image_ds_loc)
[ 1390.314586] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] vm_util.copy_virtual_disk(
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] session._wait_for_task(vmdk_copy_task)
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] return self.wait_for_task(task_ref)
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] return evt.wait()
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] result = hub.switch()
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] return self.greenlet.switch()
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1390.315015] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] self.f(*self.args, **self.kw)
[ 1390.315357] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1390.315357] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] raise exceptions.translate_fault(task_info.error)
[ 1390.315357] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1390.315357] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Faults: ['InvalidArgument']
[ 1390.315357] env[61440]: ERROR nova.compute.manager [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c]
[ 1390.315357] env[61440]: DEBUG nova.compute.utils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1390.316853] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Build of instance b8a27ad2-4cc5-4219-9bc3-5735433b153c was re-scheduled: A specified parameter was not correct: fileType
[ 1390.316853] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1390.317235] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1390.317408] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1390.317575] env[61440]: DEBUG nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1390.317737] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1390.725182] env[61440]: DEBUG nova.network.neutron [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1390.736854] env[61440]: INFO nova.compute.manager [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Took 0.42 seconds to deallocate network for instance.
[ 1390.843261] env[61440]: INFO nova.scheduler.client.report [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Deleted allocations for instance b8a27ad2-4cc5-4219-9bc3-5735433b153c
[ 1390.866538] env[61440]: DEBUG oslo_concurrency.lockutils [None req-74e3173e-13e4-460b-a76e-2b57a49f80fd tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 686.574s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1390.867705] env[61440]: DEBUG oslo_concurrency.lockutils [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 490.574s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1390.867935] env[61440]: DEBUG oslo_concurrency.lockutils [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Acquiring lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1390.868162] env[61440]: DEBUG oslo_concurrency.lockutils [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1390.868333] env[61440]: DEBUG oslo_concurrency.lockutils [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1390.870671] env[61440]: INFO nova.compute.manager [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Terminating instance
[ 1390.872348] env[61440]: DEBUG nova.compute.manager [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1390.872540] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1390.873205] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a5360b8-4b51-4aca-8e58-c76c99e56837 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1390.880801] env[61440]: DEBUG nova.compute.manager [None req-8a0e764b-2905-4c50-8d16-2201720fd429 tempest-ServerActionsTestOtherA-1315040339 tempest-ServerActionsTestOtherA-1315040339-project-member] [instance: 0244475d-98ff-4801-a648-6728f85171ea] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1390.886592] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae69b5a-3fce-4a61-b888-2cb6fecf3d05 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1390.904569] env[61440]: DEBUG nova.compute.manager [None req-8a0e764b-2905-4c50-8d16-2201720fd429 tempest-ServerActionsTestOtherA-1315040339 tempest-ServerActionsTestOtherA-1315040339-project-member] [instance: 0244475d-98ff-4801-a648-6728f85171ea] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1390.916079] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8a27ad2-4cc5-4219-9bc3-5735433b153c could not be found.
[ 1390.916290] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1390.916472] env[61440]: INFO nova.compute.manager [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1390.916844] env[61440]: DEBUG oslo.service.loopingcall [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1390.919007] env[61440]: DEBUG nova.compute.manager [-] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1390.919125] env[61440]: DEBUG nova.network.neutron [-] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1390.930758] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8a0e764b-2905-4c50-8d16-2201720fd429 tempest-ServerActionsTestOtherA-1315040339 tempest-ServerActionsTestOtherA-1315040339-project-member] Lock "0244475d-98ff-4801-a648-6728f85171ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.939s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1390.941288] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1390.949837] env[61440]: DEBUG nova.network.neutron [-] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1390.959438] env[61440]: INFO nova.compute.manager [-] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] Took 0.04 seconds to deallocate network for instance.
[ 1390.993525] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1390.993778] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1390.995160] env[61440]: INFO nova.compute.claims [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1391.073041] env[61440]: DEBUG oslo_concurrency.lockutils [None req-89c00fb2-542b-483e-a69c-67ee16c0991c tempest-ServerRescueTestJSON-191433312 tempest-ServerRescueTestJSON-191433312-project-member] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1391.075697] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 245.445s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1391.075697] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: b8a27ad2-4cc5-4219-9bc3-5735433b153c] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1391.075697] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "b8a27ad2-4cc5-4219-9bc3-5735433b153c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1391.303991] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb547a8-0237-43b4-825b-c3ff97650800 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1391.311555] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1198a801-917d-44a4-8c06-8e480220974d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1391.342023] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6685bb3f-1c13-45c2-a66f-82cfd7013990 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1391.349507] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52bdf8c-7b02-463c-8907-b83202665bff {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1391.362356] env[61440]: DEBUG nova.compute.provider_tree [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1391.379192] env[61440]: DEBUG nova.scheduler.client.report [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1391.407833] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1391.408473] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1391.449640] env[61440]: DEBUG nova.compute.utils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1391.451019] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1391.451241] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1391.465339] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1391.527118] env[61440]: DEBUG nova.policy [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '719d773060694d48aacfb9fe21f9c8ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebf7143ce68b47bfb93e66b2aa5cc890', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1391.551457] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1391.585400] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1391.585646] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1391.585810] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1391.585990] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1391.586156] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1391.586309] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1391.586520] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1391.586696] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1391.586844] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1391.587017] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1391.587207] env[61440]: DEBUG nova.virt.hardware [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1391.588116] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e96b50f-0f84-4245-96bd-7c57af6a9c65 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1391.596816] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42230ca5-37cc-40ea-9c7f-879795c68545 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1392.012960] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Successfully created port: 7b42fce8-d132-4329-bb4a-7b7e72e00faa {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1392.963661] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Successfully updated port: 7b42fce8-d132-4329-bb4a-7b7e72e00faa {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1392.976768] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "refresh_cache-2486ea17-09bd-410d-a96d-bc863c3354e2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1392.976914] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "refresh_cache-2486ea17-09bd-410d-a96d-bc863c3354e2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1392.977087] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1393.031154] env[61440]: DEBUG nova.compute.manager [req-3cd58f4d-36c1-4dae-8398-2160a180c472 req-ff61ece3-bb0d-4d20-a95f-54d604f6d771 service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Received event network-vif-plugged-7b42fce8-d132-4329-bb4a-7b7e72e00faa {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1393.031154] env[61440]: DEBUG oslo_concurrency.lockutils [req-3cd58f4d-36c1-4dae-8398-2160a180c472 req-ff61ece3-bb0d-4d20-a95f-54d604f6d771 service nova] Acquiring lock "2486ea17-09bd-410d-a96d-bc863c3354e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1393.031154] env[61440]: DEBUG oslo_concurrency.lockutils [req-3cd58f4d-36c1-4dae-8398-2160a180c472 req-ff61ece3-bb0d-4d20-a95f-54d604f6d771 service nova] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1393.031154] env[61440]: DEBUG oslo_concurrency.lockutils [req-3cd58f4d-36c1-4dae-8398-2160a180c472 req-ff61ece3-bb0d-4d20-a95f-54d604f6d771 service nova] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1393.031493] env[61440]: DEBUG nova.compute.manager [req-3cd58f4d-36c1-4dae-8398-2160a180c472 req-ff61ece3-bb0d-4d20-a95f-54d604f6d771 service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] No waiting events found dispatching network-vif-plugged-7b42fce8-d132-4329-bb4a-7b7e72e00faa {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1393.031493] env[61440]: WARNING nova.compute.manager [req-3cd58f4d-36c1-4dae-8398-2160a180c472 req-ff61ece3-bb0d-4d20-a95f-54d604f6d771 service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Received unexpected event network-vif-plugged-7b42fce8-d132-4329-bb4a-7b7e72e00faa for instance with vm_state building and task_state spawning.
[ 1393.046157] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1393.292137] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Updating instance_info_cache with network_info: [{"id": "7b42fce8-d132-4329-bb4a-7b7e72e00faa", "address": "fa:16:3e:e3:94:e8", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b42fce8-d1", "ovs_interfaceid": "7b42fce8-d132-4329-bb4a-7b7e72e00faa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1393.308473] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "refresh_cache-2486ea17-09bd-410d-a96d-bc863c3354e2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1393.308849] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance network_info: |[{"id": "7b42fce8-d132-4329-bb4a-7b7e72e00faa", "address": "fa:16:3e:e3:94:e8", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b42fce8-d1", "ovs_interfaceid": "7b42fce8-d132-4329-bb4a-7b7e72e00faa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1393.309321] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:94:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b42fce8-d132-4329-bb4a-7b7e72e00faa', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1393.318011] env[61440]: DEBUG oslo.service.loopingcall [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1393.318512] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1393.318791] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ba64f87-a38c-4d67-9625-bbd0d96eaa4a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1393.340498] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1393.340498] env[61440]: value = "task-4281341"
[ 1393.340498] env[61440]: _type = "Task"
[ 1393.340498] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1393.348676] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281341, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1393.851102] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281341, 'name': CreateVM_Task, 'duration_secs': 0.319553} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1393.851283] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1393.851949] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1393.852129] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1393.852438] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1393.852683] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14470389-542d-4ccc-983e-6d8909900b35 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1393.856817] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){
[ 1393.856817] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d86f05-3ed6-3e9e-59a7-b31aaed00602"
[ 1393.856817] env[61440]: _type = "Task"
[ 1393.856817] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1393.866739] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d86f05-3ed6-3e9e-59a7-b31aaed00602, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1394.367791] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1394.368071] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1394.368288] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1395.057244] env[61440]: DEBUG nova.compute.manager [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Received event network-changed-7b42fce8-d132-4329-bb4a-7b7e72e00faa {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1395.057348] env[61440]: DEBUG nova.compute.manager [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Refreshing instance network info cache due to event network-changed-7b42fce8-d132-4329-bb4a-7b7e72e00faa. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 1395.057563] env[61440]: DEBUG oslo_concurrency.lockutils [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] Acquiring lock "refresh_cache-2486ea17-09bd-410d-a96d-bc863c3354e2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1395.057706] env[61440]: DEBUG oslo_concurrency.lockutils [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] Acquired lock "refresh_cache-2486ea17-09bd-410d-a96d-bc863c3354e2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1395.057870] env[61440]: DEBUG nova.network.neutron [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Refreshing network info cache for port 7b42fce8-d132-4329-bb4a-7b7e72e00faa {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1395.343764] env[61440]: DEBUG nova.network.neutron [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Updated VIF entry in instance network info cache for port 7b42fce8-d132-4329-bb4a-7b7e72e00faa. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1395.344165] env[61440]: DEBUG nova.network.neutron [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Updating instance_info_cache with network_info: [{"id": "7b42fce8-d132-4329-bb4a-7b7e72e00faa", "address": "fa:16:3e:e3:94:e8", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b42fce8-d1", "ovs_interfaceid": "7b42fce8-d132-4329-bb4a-7b7e72e00faa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1395.345819] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "2486ea17-09bd-410d-a96d-bc863c3354e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1395.353351] env[61440]: DEBUG oslo_concurrency.lockutils [req-825cc164-8443-4946-8772-14b9c458a6b5 req-c4a5b280-d071-416c-93d1-c9f161cb25fa service nova] Releasing lock "refresh_cache-2486ea17-09bd-410d-a96d-bc863c3354e2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1399.275115] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1399.275367] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances with incomplete migration {{(pid=61440) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}}
[ 1401.283833] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1401.284169] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1401.284239] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 1404.274953] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1406.274135] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1406.274439] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 1406.274475] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1406.298109] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.298362] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.298576] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.298783] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.298986] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.299209] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.299417] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.299616] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.299813] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.300020] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1406.300228] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 1407.274534] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1407.274834] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1407.748763] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "53a5db32-d312-488e-8193-df4504736fc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1407.748920] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "53a5db32-d312-488e-8193-df4504736fc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1408.281479] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1409.275376] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1410.269628] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1410.274290] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1410.286178] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1410.286407] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1410.286577] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1410.286736] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1410.287948] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bc4b65-7380-462b-a5de-6415280dcffd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1410.296479] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aaf51d-3751-4983-acfe-672f5cc4386c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1410.310206] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943ccc30-2d16-454b-8c48-30e8be2b9edb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1410.316468] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a332fa3-bb73-46b4-9786-f43e584c5b9d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1410.345999] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180668MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1410.346166] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.346360] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.492039] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.492474] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.492474] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.492570] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.492688] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.492832] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.492938] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.493116] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.493258] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.493378] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1410.506880] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e0872184-53cf-46b9-826d-f48c83506911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.517896] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 607c5b5e-84b8-458d-a430-5171095922f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.529270] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.540046] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.551129] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.561860] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.571598] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.571836] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1410.571986] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1410.588253] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing inventories for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1410.602934] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating ProviderTree inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1410.603297] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1410.614095] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing aggregate associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, aggregates: None {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1410.632787] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing trait associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1410.820247] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fddf91-be8c-4fa5-8a85-826eeeadea5a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.827841] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389cf6c4-b334-45c9-abe7-5912794dcdef {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.858603] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae861495-95bb-4c90-aee0-a32accfb8f1a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.865880] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bcef68-46ef-4cac-8d7a-fc017f2c85dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.878688] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.889515] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1410.906878] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1410.907081] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.561s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.274768] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.275094] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1414.287471] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] There are 0 instances to clean {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1416.441503] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fc527aa2-5927-4f97-82dd-f4598b1d6eb4 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "41d7967c-65be-4198-936e-1137afa763dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.441784] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fc527aa2-5927-4f97-82dd-f4598b1d6eb4 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "41d7967c-65be-4198-936e-1137afa763dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.490015] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1a13bfdb-c194-4e43-b9b0-c1bf8bf73e69 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Acquiring lock "9d6c9151-6d22-41fe-8f69-fd17758a20b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.490299] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1a13bfdb-c194-4e43-b9b0-c1bf8bf73e69 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "9d6c9151-6d22-41fe-8f69-fd17758a20b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.214706] env[61440]: WARNING oslo_vmware.rw_handles [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1439.214706] env[61440]: 
ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1439.214706] env[61440]: ERROR oslo_vmware.rw_handles [ 1439.215467] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1439.217100] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1439.217401] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Copying Virtual Disk [datastore2] vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/5c639eb3-6846-4cef-9b0a-0510e4cea1cb/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1439.217699] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f89b630-9e2d-47f7-b955-805600f926dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.228692] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Waiting for the task: (returnval){ [ 1439.228692] env[61440]: value = "task-4281342" [ 1439.228692] env[61440]: _type = "Task" [ 1439.228692] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.236377] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Task: {'id': task-4281342, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.741750] env[61440]: DEBUG oslo_vmware.exceptions [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1439.742068] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.742611] env[61440]: ERROR nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1439.742611] env[61440]: Faults: ['InvalidArgument'] [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Traceback (most recent call last): [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] yield resources [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self.driver.spawn(context, instance, image_meta, [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self._fetch_image_if_missing(context, vi) [ 1439.742611] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] image_cache(vi, tmp_image_ds_loc) [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] vm_util.copy_virtual_disk( [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] session._wait_for_task(vmdk_copy_task) [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] return self.wait_for_task(task_ref) [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] return evt.wait() [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] result = hub.switch() [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1439.742931] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] return self.greenlet.switch() [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self.f(*self.args, **self.kw) [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] raise exceptions.translate_fault(task_info.error) [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Faults: ['InvalidArgument'] [ 1439.743466] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] [ 1439.743466] env[61440]: INFO nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Terminating instance [ 1439.744569] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.744743] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 
tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1439.745390] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1439.745577] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1439.745803] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b10bc3f5-ffd9-479e-9e8f-6c8515db9534 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.748158] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f71547-02e0-4742-ab31-50b2571dad9e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.755294] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1439.755520] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56304894-3c70-469a-847c-72b1336c5fa4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.757769] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1439.757999] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1439.759020] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40809d3f-d486-4605-a85a-829ca33ad101 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.763763] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1439.763763] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52601c84-534c-2a75-19e8-87705fa549ae" [ 1439.763763] env[61440]: _type = "Task" [ 1439.763763] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.771695] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52601c84-534c-2a75-19e8-87705fa549ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.829096] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1439.829318] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1439.829524] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Deleting the datastore file [datastore2] 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1439.829794] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e45b33cd-0c48-40f1-9247-f0d2ca8cd433 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.836221] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Waiting for the task: (returnval){ [ 1439.836221] env[61440]: value = "task-4281344" [ 1439.836221] env[61440]: _type = "Task" [ 1439.836221] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.843778] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Task: {'id': task-4281344, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.274016] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1440.274378] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating directory with path [datastore2] vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1440.274580] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45fe2e68-9e4b-4c97-9745-10ae38cc2ed8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.285670] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created directory with path [datastore2] vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1440.285856] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Fetch image to [datastore2] vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1440.286039] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1440.286749] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c07c09-945c-4d87-af83-c37a02dd9b7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.293245] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f0fe91-5159-4e4c-be28-d1159313a283 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.302301] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126cd3ec-c1c2-4f2b-bade-f2ad24928b4d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.332057] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-47554e31-1fda-45ff-9a07-20a40b9424dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.339918] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aca6cd06-5b3d-42c8-a7bf-a32bd28ddff9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.345755] env[61440]: DEBUG oslo_vmware.api [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Task: {'id': task-4281344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076473} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.345990] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1440.346189] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1440.346362] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1440.346534] env[61440]: INFO nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Took 0.60 seconds to destroy the instance on the hypervisor. 
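[editor's note] The "Waiting for the task ... progress is 0% ... completed successfully" sequence above is oslo.vmware's task-polling loop. A minimal sketch of that pattern follows, using the public oslo.vmware session API; the host and credentials are placeholders, not values from this deployment, and the actual SOAP call is left as a comment rather than invented.

```python
# Sketch of the oslo.vmware task-polling pattern seen in this log
# (wait_for_task -> _poll_task on a ~0.5 s cadence until success or fault).
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',      # placeholder host, not this deployment's vCenter
    'user', 'secret',           # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5,     # roughly matches the poll cadence in the log
    create_session=False)       # skip the login round-trip for this sketch

# invoke_api() would issue the SOAP call (e.g. DeleteDatastoreFile_Task)
# and return a task reference; wait_for_task() then loops in _poll_task()
# until the task reports success or raises a translated fault:
#
#   task = session.invoke_api(...)        # elided; depends on the vim module
#   task_info = session.wait_for_task(task)
```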
[ 1440.348719] env[61440]: DEBUG nova.compute.claims [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1440.348898] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.349191] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.361066] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1440.417261] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1440.482719] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1440.482719] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1440.642932] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1fad5b-50f4-4c4d-8c70-905f58153920 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.650746] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b981ce-d002-4ef0-abea-32550472276e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.680335] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fce4b07-2a80-489b-b94e-27d9f8829a23 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.687700] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92fe322-dde8-4146-a443-16260d3b97d3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.700874] env[61440]: DEBUG nova.compute.provider_tree [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1440.709644] env[61440]: DEBUG nova.scheduler.client.report [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1440.729121] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.380s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.729710] env[61440]: ERROR nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1440.729710] env[61440]: Faults: ['InvalidArgument'] [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Traceback (most recent call last): [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1440.729710] 
env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self.driver.spawn(context, instance, image_meta, [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self._fetch_image_if_missing(context, vi) [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] image_cache(vi, tmp_image_ds_loc) [ 1440.729710] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] vm_util.copy_virtual_disk( [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] session._wait_for_task(vmdk_copy_task) [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] return self.wait_for_task(task_ref) [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] return evt.wait() [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] result = hub.switch() [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] return self.greenlet.switch() [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1440.730085] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] self.f(*self.args, **self.kw) [ 1440.730556] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1440.730556] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] raise exceptions.translate_fault(task_info.error) [ 1440.730556] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1440.730556] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Faults: ['InvalidArgument'] [ 1440.730556] env[61440]: ERROR nova.compute.manager [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] [ 1440.730556] env[61440]: DEBUG nova.compute.utils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1440.731954] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Build of instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 was re-scheduled: A specified parameter was not correct: fileType [ 1440.731954] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1440.732361] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1440.732533] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1440.732709] env[61440]: DEBUG nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1440.732905] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1441.328348] env[61440]: DEBUG nova.network.neutron [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.342729] env[61440]: INFO nova.compute.manager [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Took 0.61 seconds to deallocate network for instance. [ 1441.437669] env[61440]: INFO nova.scheduler.client.report [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Deleted allocations for instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 [ 1441.467631] env[61440]: DEBUG oslo_concurrency.lockutils [None req-05d2843e-0c00-4052-bb23-82def16a251e tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 688.315s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.468831] env[61440]: DEBUG oslo_concurrency.lockutils [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 493.237s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.469077] env[61440]: DEBUG oslo_concurrency.lockutils [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Acquiring lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.469288] env[61440]: DEBUG oslo_concurrency.lockutils [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.469515] env[61440]: DEBUG oslo_concurrency.lockutils [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.471486] env[61440]: INFO nova.compute.manager [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Terminating instance [ 1441.473330] env[61440]: DEBUG nova.compute.manager [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1441.473526] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1441.473989] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92b1bb65-9a17-40c1-b3d9-c830e2ebc8f7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.484585] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb89a64-0ff3-4245-af90-9fa2b4994d91 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.495359] env[61440]: DEBUG nova.compute.manager [None req-7664a823-e632-41a5-b7af-82426e039ac1 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] [instance: 9349b760-746f-40fa-998d-fdcb325431b2] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1441.517981] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970 could not be found. 
[ 1441.518204] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1441.518391] env[61440]: INFO nova.compute.manager [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1441.518979] env[61440]: DEBUG oslo.service.loopingcall [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.518979] env[61440]: DEBUG nova.compute.manager [-] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1441.518979] env[61440]: DEBUG nova.network.neutron [-] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1441.522156] env[61440]: DEBUG nova.compute.manager [None req-7664a823-e632-41a5-b7af-82426e039ac1 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] [instance: 9349b760-746f-40fa-998d-fdcb325431b2] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1441.545482] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7664a823-e632-41a5-b7af-82426e039ac1 tempest-SecurityGroupsTestJSON-1230185179 tempest-SecurityGroupsTestJSON-1230185179-project-member] Lock "9349b760-746f-40fa-998d-fdcb325431b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.544s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.547648] env[61440]: DEBUG nova.network.neutron [-] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.555111] env[61440]: INFO nova.compute.manager [-] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] Took 0.04 seconds to deallocate network for instance. [ 1441.557163] env[61440]: DEBUG nova.compute.manager [None req-a674334e-fe2c-42a7-9fa5-3ecafb0b1734 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 97f08952-2a93-4e0c-9e46-31fc421a0291] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1441.580115] env[61440]: DEBUG nova.compute.manager [None req-a674334e-fe2c-42a7-9fa5-3ecafb0b1734 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 97f08952-2a93-4e0c-9e46-31fc421a0291] Instance disappeared before build. 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1441.606260] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a674334e-fe2c-42a7-9fa5-3ecafb0b1734 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "97f08952-2a93-4e0c-9e46-31fc421a0291" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.740s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.615795] env[61440]: DEBUG nova.compute.manager [None req-8c9e0cd0-d5ee-4c33-af73-fd79bcc3d0ba tempest-ServerMetadataNegativeTestJSON-1986415539 tempest-ServerMetadataNegativeTestJSON-1986415539-project-member] [instance: e0872184-53cf-46b9-826d-f48c83506911] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1441.643569] env[61440]: DEBUG nova.compute.manager [None req-8c9e0cd0-d5ee-4c33-af73-fd79bcc3d0ba tempest-ServerMetadataNegativeTestJSON-1986415539 tempest-ServerMetadataNegativeTestJSON-1986415539-project-member] [instance: e0872184-53cf-46b9-826d-f48c83506911] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1441.660064] env[61440]: DEBUG oslo_concurrency.lockutils [None req-edd63661-7b01-4aab-8d04-56fa048970d5 tempest-ServersTestFqdnHostnames-693636825 tempest-ServersTestFqdnHostnames-693636825-project-member] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.661060] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 296.032s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.661173] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970] During sync_power_state the instance has a pending task (deleting). Skip. 
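[Editor's note] The _sync_power_states record above acquires the per-instance lock after waiting ~296s, then skips the instance because it has a pending task (deleting). The sketch below shows that guard on its own: a periodic power-state sync must not act on an instance that another operation is mutating, so any non-None task_state short-circuits the comparison between the database's power state and the driver's. This is a minimal sketch under assumed shapes; the `Instance` dataclass and the `driver_power_state` argument are illustrative, not Nova's real objects.

```python
# Illustrative sketch of the skip-on-pending-task rule in _sync_power_states.
# The data shapes below are assumptions for the example, not Nova's interfaces.
from dataclasses import dataclass
from typing import Optional

@dataclass
class Instance:
    uuid: str
    power_state: str           # e.g. "running", "shutdown"
    task_state: Optional[str]  # e.g. "deleting", "spawning", or None

def sync_power_state(instance: Instance, driver_power_state: str) -> None:
    if instance.task_state is not None:
        # Exactly the case logged above: a delete is in flight, so the
        # periodic sync backs off rather than racing the other operation.
        print(f"[{instance.uuid}] pending task ({instance.task_state}). Skip.")
        return
    if instance.power_state != driver_power_state:
        # Only a quiescent instance gets reconciled against the hypervisor.
        print(f"[{instance.uuid}] DB says {instance.power_state!r} but driver "
              f"says {driver_power_state!r}; updating the record.")
        instance.power_state = driver_power_state

sync_power_state(
    Instance("07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970", "running", "deleting"),
    "shutdown",
)
```

Together with the per-instance lock visible in the surrounding records, this is why the long-delayed periodic task exits in ~0.000s: by the time it runs, the terminate path owns the instance's fate and the sync has nothing safe to do.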
[ 1441.661328] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "07b4a2c8-47e7-4d7e-aa17-46cbbaa8c970" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.665761] env[61440]: DEBUG oslo_concurrency.lockutils [None req-8c9e0cd0-d5ee-4c33-af73-fd79bcc3d0ba tempest-ServerMetadataNegativeTestJSON-1986415539 tempest-ServerMetadataNegativeTestJSON-1986415539-project-member] Lock "e0872184-53cf-46b9-826d-f48c83506911" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.844s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.687547] env[61440]: DEBUG nova.compute.manager [None req-39d3dc05-4c01-459b-898b-036e8d7486ff tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 607c5b5e-84b8-458d-a430-5171095922f3] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1441.714652] env[61440]: DEBUG nova.compute.manager [None req-39d3dc05-4c01-459b-898b-036e8d7486ff tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 607c5b5e-84b8-458d-a430-5171095922f3] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1441.738208] env[61440]: DEBUG oslo_concurrency.lockutils [None req-39d3dc05-4c01-459b-898b-036e8d7486ff tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "607c5b5e-84b8-458d-a430-5171095922f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.935s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.748944] env[61440]: DEBUG nova.compute.manager [None req-b4f12f07-5bb9-4473-a432-5f584e5509cb tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: 41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1441.773165] env[61440]: DEBUG nova.compute.manager [None req-b4f12f07-5bb9-4473-a432-5f584e5509cb tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: 41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1441.793295] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4f12f07-5bb9-4473-a432-5f584e5509cb tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "41cc3aaa-d2ef-43aa-9fb8-2c1beffe38f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.519s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.804414] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1441.860024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.860024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.861805] env[61440]: INFO nova.compute.claims [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.075918] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08756e51-5f1b-467c-b310-dced6cd809c8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.083556] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868e3084-5d54-4f30-bb42-60c8072782f3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.112614] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ea70a8-287a-411d-b2f7-79a6199a25a7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.119866] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a158aa0-3af2-462a-a11d-4f6487b301bd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.132649] env[61440]: DEBUG nova.compute.provider_tree [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.161629] env[61440]: DEBUG nova.scheduler.client.report [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.176683] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.177825] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1442.209289] env[61440]: DEBUG nova.compute.utils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1442.210810] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1442.210985] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1442.219909] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1442.273802] env[61440]: DEBUG nova.policy [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8390d2ff165d424f92d11a853db18009', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5bd8b48dfd049c4a49c96d39baeb237', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1442.284063] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1442.309186] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1442.309435] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1442.309626] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.309819] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1442.309969] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.310130] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1442.310526] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1442.310716] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1442.310890] env[61440]: DEBUG nova.virt.hardware [None 
req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1442.311569] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1442.311798] env[61440]: DEBUG nova.virt.hardware [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1442.312653] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0b0cd1-16bf-40dc-a6d8-05da8f2621b2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.320681] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff86303-8b34-446d-bf42-863b681e0b6e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.600307] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Successfully created port: 7356163c-ad0a-41a5-bfdc-ef6483d3a077 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1443.433113] env[61440]: DEBUG nova.compute.manager [req-ebf97fcc-0ec4-49c2-9c2a-c39140ff3465 req-eb3f9e03-a1d0-4f9c-ba89-499e3ab00dae service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Received event network-vif-plugged-7356163c-ad0a-41a5-bfdc-ef6483d3a077 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1443.433340] env[61440]: DEBUG oslo_concurrency.lockutils [req-ebf97fcc-0ec4-49c2-9c2a-c39140ff3465 req-eb3f9e03-a1d0-4f9c-ba89-499e3ab00dae service nova] Acquiring lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.433545] env[61440]: DEBUG oslo_concurrency.lockutils [req-ebf97fcc-0ec4-49c2-9c2a-c39140ff3465 req-eb3f9e03-a1d0-4f9c-ba89-499e3ab00dae service nova] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.433713] env[61440]: DEBUG oslo_concurrency.lockutils [req-ebf97fcc-0ec4-49c2-9c2a-c39140ff3465 req-eb3f9e03-a1d0-4f9c-ba89-499e3ab00dae service nova] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.433898] env[61440]: DEBUG nova.compute.manager 
[req-ebf97fcc-0ec4-49c2-9c2a-c39140ff3465 req-eb3f9e03-a1d0-4f9c-ba89-499e3ab00dae service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] No waiting events found dispatching network-vif-plugged-7356163c-ad0a-41a5-bfdc-ef6483d3a077 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1443.434298] env[61440]: WARNING nova.compute.manager [req-ebf97fcc-0ec4-49c2-9c2a-c39140ff3465 req-eb3f9e03-a1d0-4f9c-ba89-499e3ab00dae service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Received unexpected event network-vif-plugged-7356163c-ad0a-41a5-bfdc-ef6483d3a077 for instance with vm_state building and task_state spawning. [ 1443.515206] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Successfully updated port: 7356163c-ad0a-41a5-bfdc-ef6483d3a077 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1443.526117] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "refresh_cache-ccdd9481-6f4b-4a84-9f05-a4709b6615d9" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.526117] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquired lock "refresh_cache-ccdd9481-6f4b-4a84-9f05-a4709b6615d9" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.526117] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1443.568771] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1443.770345] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Updating instance_info_cache with network_info: [{"id": "7356163c-ad0a-41a5-bfdc-ef6483d3a077", "address": "fa:16:3e:41:fe:a5", "network": {"id": "f1140b59-9757-4ff5-9a22-9c2ced926a25", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-735898291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5bd8b48dfd049c4a49c96d39baeb237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7356163c-ad", "ovs_interfaceid": "7356163c-ad0a-41a5-bfdc-ef6483d3a077", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.784125] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Releasing lock "refresh_cache-ccdd9481-6f4b-4a84-9f05-a4709b6615d9" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.784425] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance network_info: |[{"id": "7356163c-ad0a-41a5-bfdc-ef6483d3a077", "address": "fa:16:3e:41:fe:a5", "network": {"id": "f1140b59-9757-4ff5-9a22-9c2ced926a25", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-735898291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5bd8b48dfd049c4a49c96d39baeb237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7356163c-ad", "ovs_interfaceid": "7356163c-ad0a-41a5-bfdc-ef6483d3a077", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1443.784966] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:fe:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7356163c-ad0a-41a5-bfdc-ef6483d3a077', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1443.792478] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Creating folder: Project (b5bd8b48dfd049c4a49c96d39baeb237). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1443.793075] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3555ef8-cd3a-4844-836b-44562e9bec60 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.804013] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Created folder: Project (b5bd8b48dfd049c4a49c96d39baeb237) in parent group-v843372. [ 1443.804291] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Creating folder: Instances. Parent ref: group-v843454. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1443.804543] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57968f06-3e07-45a1-800b-2c0b8eb36720 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.813866] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Created folder: Instances in parent group-v843454. [ 1443.814123] env[61440]: DEBUG oslo.service.loopingcall [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1443.814316] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1443.814515] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4d247cf-e45a-4328-a3a6-79e7ecdfc27f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.832999] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1443.832999] env[61440]: value = "task-4281347" [ 1443.832999] env[61440]: _type = "Task" [ 1443.832999] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.840146] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281347, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.342999] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281347, 'name': CreateVM_Task, 'duration_secs': 0.317027} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.343192] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1444.343845] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.344022] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.344353] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1444.344627] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdce2e70-2d6f-4103-89b7-baf5f6f05147 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.348940] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Waiting for the task: (returnval){ [ 1444.348940] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5289d7a6-4afc-b33d-cf1c-bf9ab4622201" [ 1444.348940] env[61440]: _type = "Task" [ 1444.348940] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.356355] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5289d7a6-4afc-b33d-cf1c-bf9ab4622201, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.860083] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.860420] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1444.860564] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.466914] env[61440]: DEBUG nova.compute.manager [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Received event network-changed-7356163c-ad0a-41a5-bfdc-ef6483d3a077 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1445.467124] env[61440]: DEBUG nova.compute.manager [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Refreshing instance network info cache due to event network-changed-7356163c-ad0a-41a5-bfdc-ef6483d3a077. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1445.467339] env[61440]: DEBUG oslo_concurrency.lockutils [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] Acquiring lock "refresh_cache-ccdd9481-6f4b-4a84-9f05-a4709b6615d9" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.467485] env[61440]: DEBUG oslo_concurrency.lockutils [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] Acquired lock "refresh_cache-ccdd9481-6f4b-4a84-9f05-a4709b6615d9" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.467651] env[61440]: DEBUG nova.network.neutron [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Refreshing network info cache for port 7356163c-ad0a-41a5-bfdc-ef6483d3a077 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1445.827757] env[61440]: DEBUG nova.network.neutron [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Updated VIF entry in instance network info cache for port 7356163c-ad0a-41a5-bfdc-ef6483d3a077. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1445.828351] env[61440]: DEBUG nova.network.neutron [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Updating instance_info_cache with network_info: [{"id": "7356163c-ad0a-41a5-bfdc-ef6483d3a077", "address": "fa:16:3e:41:fe:a5", "network": {"id": "f1140b59-9757-4ff5-9a22-9c2ced926a25", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-735898291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5bd8b48dfd049c4a49c96d39baeb237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7356163c-ad", "ovs_interfaceid": "7356163c-ad0a-41a5-bfdc-ef6483d3a077", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.837288] env[61440]: DEBUG oslo_concurrency.lockutils [req-cf89cd07-9c1b-47d5-b578-6d8972914045 req-f7689ff3-02c8-4ba2-a637-cba71d2a797b service nova] Releasing lock "refresh_cache-ccdd9481-6f4b-4a84-9f05-a4709b6615d9" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.157920] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.158179] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.339320] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.287856] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances 
{{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.288158] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.288279] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1465.274368] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.274609] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.274971] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1466.274971] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1466.299057] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299057] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299057] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299057] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299264] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299645] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299645] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299764] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.299764] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.300026] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1466.300115] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1468.274255] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.271154] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.273738] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.273913] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.285571] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.285787] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.285956] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.286145] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1470.287599] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d009b3-08a4-4d95-bfbb-b209e30e452a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.296514] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8be4cd0-aa80-412a-ba1e-6e51eb09aaca {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.310080] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3912f77-cce5-4505-84d9-dcc859dc593a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.316205] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1ac29f-96bd-4602-813d-3a53214c2d51 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.345879] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180673MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1470.346050] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.346253] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.419975] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420166] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420302] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420428] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420549] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420667] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420781] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.420937] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.421028] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.421163] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1470.436928] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.449290] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.459875] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.470111] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 41d7967c-65be-4198-936e-1137afa763dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.480444] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9d6c9151-6d22-41fe-8f69-fd17758a20b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.490527] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.490764] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1470.490929] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1470.681297] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faaff74-8d36-4a34-a96c-0aa24d480615 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.688901] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3229d3-4af3-4a41-8018-ad2e3b5eb466 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.719718] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a24789-2d31-4025-950b-d20347ddfedf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.726706] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81096e0-2edc-48c3-a465-d27f848c06d4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.739497] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.747947] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1470.761713] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1470.761898] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.416s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.762203] env[61440]: DEBUG oslo_service.periodic_task [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.270128] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.787552] env[61440]: WARNING oslo_vmware.rw_handles [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1489.787552] env[61440]: ERROR oslo_vmware.rw_handles [ 1489.788157] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1489.790077] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1489.790386] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Copying Virtual Disk [datastore2] vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/092223f1-3e85-4d31-9b19-ce67e6dbade1/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1489.790682] 
env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef806220-a75f-4eaa-9bf1-383fd1aacee3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.798684] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1489.798684] env[61440]: value = "task-4281348" [ 1489.798684] env[61440]: _type = "Task" [ 1489.798684] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.806385] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.309345] env[61440]: DEBUG oslo_vmware.exceptions [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1490.309632] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.310201] env[61440]: ERROR nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1490.310201] env[61440]: Faults: ['InvalidArgument'] [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Traceback (most recent call last): [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] yield resources [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self.driver.spawn(context, instance, image_meta, [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 
6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self._fetch_image_if_missing(context, vi) [ 1490.310201] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] image_cache(vi, tmp_image_ds_loc) [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] vm_util.copy_virtual_disk( [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] session._wait_for_task(vmdk_copy_task) [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] return self.wait_for_task(task_ref) [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] return evt.wait() [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] result = hub.switch() [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1490.310552] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] return self.greenlet.switch() [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self.f(*self.args, **self.kw) [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] raise exceptions.translate_fault(task_info.error) [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Faults: 
['InvalidArgument'] [ 1490.310870] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] [ 1490.310870] env[61440]: INFO nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Terminating instance [ 1490.312116] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.312331] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.312570] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cfd22cf-d86a-485d-9c1a-56bb18223324 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.314644] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1490.314853] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1490.315569] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea3f491-2294-43ab-8952-7cc2ed510f4a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.322064] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1490.322295] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e24c1b46-d862-4f5c-8bdb-aaa2ceaff812 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.324372] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.324547] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None 
req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1490.325467] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc81a25c-ad9f-48e6-a046-4ba4a32a07c6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.329864] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1490.329864] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d5aa02-4ccb-02d1-f90d-2b346dc1664b" [ 1490.329864] env[61440]: _type = "Task" [ 1490.329864] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.801417] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1490.801767] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1490.801890] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleting the datastore file [datastore2] 6765defd-cd4d-49e2-a734-7b3cccca8bbd {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1490.802114] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8e9c2f3-3d8b-431e-97b4-c232ec2779e6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.808351] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1490.808351] env[61440]: value = "task-4281350" [ 1490.808351] env[61440]: _type = "Task" [ 1490.808351] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.815667] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281350, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.839129] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1490.840031] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating directory with path [datastore2] vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.840031] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b737f1a6-4bfd-4348-ad23-aa178f1b5d7e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.858828] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Created directory with path [datastore2] vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.858828] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Fetch image to [datastore2] vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1490.858828] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1490.859639] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702d6ddc-4c8e-42ae-9286-64799549e771 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.866520] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368913f7-6050-4ea2-85c6-1133ddfb345f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.878082] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503040e2-c260-4694-bade-73d69c2068ba {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.908820] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6ea38958-9132-46a5-81dd-8874bd1c9eb0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.914913] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-21b1beb5-36d4-43e5-bd83-297488c3426d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.939154] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1491.023221] env[61440]: DEBUG oslo_vmware.rw_handles [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1491.083922] env[61440]: DEBUG oslo_vmware.rw_handles [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1491.084167] env[61440]: DEBUG oslo_vmware.rw_handles [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1491.319565] env[61440]: DEBUG oslo_vmware.api [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080072} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.319818] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1491.319999] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1491.320201] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1491.320425] env[61440]: INFO nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Took 1.01 seconds to destroy the instance on the hypervisor. [ 1491.322692] env[61440]: DEBUG nova.compute.claims [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1491.322867] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.323091] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.618395] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b98bb1-71c7-4304-8921-8bfb87c15220 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.626114] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5761f2e-ef16-4097-b908-6a30a465f694 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.655707] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de1b224-9955-4fb2-852f-32f4bba3daf8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.662681] env[61440]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e6401a-608d-40f1-880e-4d55b6a8bdcb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.675333] env[61440]: DEBUG nova.compute.provider_tree [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.683841] env[61440]: DEBUG nova.scheduler.client.report [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1491.711471] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.388s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.712119] env[61440]: ERROR nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1491.712119] env[61440]: Faults: ['InvalidArgument'] [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Traceback (most recent call last): [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self.driver.spawn(context, instance, image_meta, [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self._fetch_image_if_missing(context, vi) [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] image_cache(vi, tmp_image_ds_loc) [ 1491.712119] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] vm_util.copy_virtual_disk( [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] session._wait_for_task(vmdk_copy_task) [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] return self.wait_for_task(task_ref) [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] return evt.wait() [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] result = hub.switch() [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] return self.greenlet.switch() [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1491.712648] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] self.f(*self.args, **self.kw) [ 1491.713074] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1491.713074] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] raise exceptions.translate_fault(task_info.error) [ 1491.713074] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1491.713074] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Faults: ['InvalidArgument'] [ 1491.713074] env[61440]: ERROR nova.compute.manager [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] [ 1491.713074] env[61440]: DEBUG nova.compute.utils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] VimFaultException {{(pid=61440) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1491.714335] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Build of instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd was re-scheduled: A specified parameter was not correct: fileType [ 1491.714335] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1491.714710] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1491.714881] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1491.715064] env[61440]: DEBUG nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1491.715233] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1492.429754] env[61440]: DEBUG nova.network.neutron [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.468830] env[61440]: INFO nova.compute.manager [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Took 0.75 seconds to deallocate network for instance. 
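The failed spawn above follows oslo.vmware's standard task-polling pattern: CopyVirtualDisk_Task is created, wait_for_task polls its progress (the "progress is 0%" line from _poll_task), and when the task ends in error the fault is translated and raised as a VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) that nova.compute.manager catches, aborts the resource claim for, and re-schedules. A minimal sketch of that polling loop, assuming a hypothetical get_task_info() accessor in place of the real oslo_vmware session plumbing:

    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vCenter-style task until it succeeds or errors."""
        while True:
            info = get_task_info()  # hypothetical: returns a dict of task state
            if info['state'] == 'running':
                # Corresponds to the "Task: {...} progress is N%" lines
                # logged by _poll_task above.
                print("Task: %s progress is %s%%." % (info['id'], info.get('progress', 0)))
            elif info['state'] == 'success':
                return info
            elif info['state'] == 'error':
                # Corresponds to the "raise exceptions.translate_fault(task_info.error)"
                # frame in the traceback above.
                raise VimFaultException(info.get('faults', []), str(info['error']))
            time.sleep(interval)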
[ 1492.653809] env[61440]: INFO nova.scheduler.client.report [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleted allocations for instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd [ 1492.703878] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a5245816-0362-4074-b900-9dda18b6d1f7 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 687.928s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.704982] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 491.341s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.705214] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.705420] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.705599] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.707550] env[61440]: INFO nova.compute.manager [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Terminating instance [ 1492.709166] env[61440]: DEBUG nova.compute.manager [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1492.709361] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1492.709839] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2b4ad84-ebf9-4d81-8d36-bdca38f5b137 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.718828] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead60003-e51e-4e62-99bb-85ea244f7825 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.729148] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1492.749197] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6765defd-cd4d-49e2-a734-7b3cccca8bbd could not be found. [ 1492.749387] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1492.749721] env[61440]: INFO nova.compute.manager [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1492.752709] env[61440]: DEBUG oslo.service.loopingcall [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1492.753058] env[61440]: DEBUG nova.compute.manager [-] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1492.753167] env[61440]: DEBUG nova.network.neutron [-] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1492.791899] env[61440]: DEBUG nova.network.neutron [-] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.799380] env[61440]: INFO nova.compute.manager [-] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] Took 0.05 seconds to deallocate network for instance. [ 1492.884631] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.884759] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.886752] env[61440]: INFO nova.compute.claims [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1492.985102] env[61440]: DEBUG oslo_concurrency.lockutils [None req-e190bf15-1d93-45b6-974d-b18e212110c9 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.280s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.986021] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 347.356s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.986311] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 6765defd-cd4d-49e2-a734-7b3cccca8bbd] During sync_power_state the instance has a pending task (deleting). Skip. 
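The paired 'acquired ... waited N s' / '"released" ... held N s' lines throughout this log come from oslo.concurrency's named-lock wrapper (the inner frames in lockutils.py): the 491.341s wait on lock "6765defd-..." above simply means do_terminate_instance queued behind _locked_do_build_and_run_instance for the same instance. A minimal sketch of the same serialization pattern using lockutils.lock as a context manager (the timing output is illustrative, not oslo's exact wording):

    import time

    from oslo_concurrency import lockutils

    def update_claims():
        start = time.monotonic()
        # One process-wide named lock, as the resource tracker uses for
        # "compute_resources"; concurrent callers queue here, which is
        # where the 'waited' times come from.
        with lockutils.lock('compute_resources'):
            waited = time.monotonic() - start
            print('Lock "compute_resources" acquired :: waited %.3fs' % waited)
            held_start = time.monotonic()
            # ... instance_claim / abort_instance_claim bookkeeping
            # would run here under the lock ...
            held = time.monotonic() - held_start
        print('Lock "compute_resources" "released" :: held %.3fs' % held)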
[ 1492.986510] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "6765defd-cd4d-49e2-a734-7b3cccca8bbd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.179459] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14310221-c20a-4eda-922a-0181644f85e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.187912] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b1ffe8-c168-4e90-ad68-079f0957cedd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.217006] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f24d32-a1e3-4bad-929f-78e4ea6913e7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.224329] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ac83ce-3e8e-4729-a982-3f8d0f4f71e4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.236894] env[61440]: DEBUG nova.compute.provider_tree [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.246567] env[61440]: DEBUG nova.scheduler.client.report [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1493.274569] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.390s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.275109] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Start building networks asynchronously for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1493.342907] env[61440]: DEBUG nova.compute.utils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1493.344391] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1493.344565] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1493.389938] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1493.552104] env[61440]: DEBUG nova.policy [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a265c56480374729bfd01371371c9794', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2851ef119a794c5993d1d8ff98eaf249', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1493.555469] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1493.608840] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1493.609097] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1493.609269] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1493.609462] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1493.609577] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1493.609730] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1493.609941] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1493.610120] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1493.610291] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1493.610503] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1493.610679] env[61440]: DEBUG nova.virt.hardware [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1493.611670] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfd7a6f-a0fc-4489-bc13-ad261203453f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.619690] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab386b0-2782-4d9e-8187-3ef3fac98de8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.973587] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Successfully created port: a6ae5c30-cde0-434d-b598-ba6cc654b276 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1494.668547] env[61440]: DEBUG nova.compute.manager [req-91bba99e-8b2b-454c-898b-85634d9748c7 req-63232f04-48f3-4e46-802b-cd8d69ee22bd service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Received event network-vif-plugged-a6ae5c30-cde0-434d-b598-ba6cc654b276 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1494.669752] env[61440]: DEBUG oslo_concurrency.lockutils [req-91bba99e-8b2b-454c-898b-85634d9748c7 req-63232f04-48f3-4e46-802b-cd8d69ee22bd service nova] Acquiring lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.670287] env[61440]: DEBUG oslo_concurrency.lockutils [req-91bba99e-8b2b-454c-898b-85634d9748c7 req-63232f04-48f3-4e46-802b-cd8d69ee22bd service nova] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.670394] env[61440]: DEBUG oslo_concurrency.lockutils [req-91bba99e-8b2b-454c-898b-85634d9748c7 req-63232f04-48f3-4e46-802b-cd8d69ee22bd service nova] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61440) inner 
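The hardware.py entries above walk a small search: with no flavor or image limits set (65536 everywhere) and a one-vCPU m1.nano flavor, the only factorisation is sockets=1, cores=1, threads=1, hence "Got 1 possible topologies". A simplified re-implementation of that enumeration (illustrative, not the real nova.virt.hardware code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) factorisation of the vCPU count
        # that stays within the limits is a candidate topology.
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)], matching the log above

For vcpus=4 the same loop would yield six candidates, (1,1,4) through (4,1,1), which the real code then sorts by preference.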
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.671138] env[61440]: DEBUG nova.compute.manager [req-91bba99e-8b2b-454c-898b-85634d9748c7 req-63232f04-48f3-4e46-802b-cd8d69ee22bd service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] No waiting events found dispatching network-vif-plugged-a6ae5c30-cde0-434d-b598-ba6cc654b276 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1494.671409] env[61440]: WARNING nova.compute.manager [req-91bba99e-8b2b-454c-898b-85634d9748c7 req-63232f04-48f3-4e46-802b-cd8d69ee22bd service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Received unexpected event network-vif-plugged-a6ae5c30-cde0-434d-b598-ba6cc654b276 for instance with vm_state building and task_state spawning. [ 1494.719286] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Successfully updated port: a6ae5c30-cde0-434d-b598-ba6cc654b276 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1494.752667] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "refresh_cache-ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.752831] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired lock "refresh_cache-ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.753299] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1494.799897] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance cache missing network info. 
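The event entries above show the handshake between Neutron and Nova: Neutron posts network-vif-plugged-<port> as an external instance event, Nova pops the matching waiter, and when nobody has registered one yet (the spawn here had not reached the point of waiting) it logs the "Received unexpected event" WARNING and carries on, which is harmless during build. A stripped-down sketch of that waiter registry (Nova's real one is nova.compute.manager.InstanceEvents; the names below are simplified):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}   # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare(self, uuid, name):
            # Called by the spawn path before it starts waiting.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, name)] = ev
            return ev

        def pop_event(self, uuid, name):
            # Called when the external event arrives from Neutron.
            with self._lock:
                ev = self._waiters.pop((uuid, name), None)
            if ev is None:
                # "No waiting events found" -> the unexpected-event warning.
                return False
            ev.set()
            return True

    events = InstanceEvents()
    print(events.pop_event('ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0',
                           'network-vif-plugged'))  # False, as in the log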
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1495.020602] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Updating instance_info_cache with network_info: [{"id": "a6ae5c30-cde0-434d-b598-ba6cc654b276", "address": "fa:16:3e:0a:42:d4", "network": {"id": "d5127eee-6df8-4d15-ab3a-d2702fa2d058", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1444475798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2851ef119a794c5993d1d8ff98eaf249", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c24464bb-bb6b-43a2-bdcd-8086ad1a307f", "external-id": "nsx-vlan-transportzone-781", "segmentation_id": 781, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6ae5c30-cd", "ovs_interfaceid": "a6ae5c30-cde0-434d-b598-ba6cc654b276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.039024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Releasing lock "refresh_cache-ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.039342] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance network_info: |[{"id": "a6ae5c30-cde0-434d-b598-ba6cc654b276", "address": "fa:16:3e:0a:42:d4", "network": {"id": "d5127eee-6df8-4d15-ab3a-d2702fa2d058", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1444475798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2851ef119a794c5993d1d8ff98eaf249", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c24464bb-bb6b-43a2-bdcd-8086ad1a307f", "external-id": "nsx-vlan-transportzone-781", "segmentation_id": 781, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6ae5c30-cd", "ovs_interfaceid": "a6ae5c30-cde0-434d-b598-ba6cc654b276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1495.039869] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:42:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c24464bb-bb6b-43a2-bdcd-8086ad1a307f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6ae5c30-cde0-434d-b598-ba6cc654b276', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.047817] env[61440]: DEBUG oslo.service.loopingcall [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.048884] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1495.048884] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a580f40-c292-4f92-bf17-0951ce50b291 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.070880] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.070880] env[61440]: value = "task-4281351" [ 1495.070880] env[61440]: _type = "Task" [ 1495.070880] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.080088] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281351, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.580812] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281351, 'name': CreateVM_Task, 'duration_secs': 0.318585} completed successfully. 
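CreateVM_Task above is asynchronous on the vCenter side; oslo.vmware simply polls the task until it reaches a terminal state, which is why the log shows "progress is 0%" followed half a second later by "completed successfully ... duration_secs: 0.318585". A generic version of that poll loop (hypothetical task-info interface for illustration, not oslo.vmware's actual implementation):

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info is assumed to return an object with .state
        # ('running', 'success' or 'error') and .error attributes.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(info.error)
            time.sleep(interval)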
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.580812] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1495.581594] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.581774] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.582195] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1495.582350] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd1f32e2-60e4-4655-b477-760ede8e2238 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.586917] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1495.586917] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52893883-9ad2-071c-ea53-4807904d4b38" [ 1495.586917] env[61440]: _type = "Task" [ 1495.586917] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.594522] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52893883-9ad2-071c-ea53-4807904d4b38, 'name': SearchDatastore_Task} progress is 0%. 
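The lock and semaphore dance above around "[datastore2] devstack-image-cache_base/5a2cfd76-..." serialises image-cache access per image: only one worker may probe or populate the cached VMDK for a given image at a time, so concurrent spawns from the same image cannot race each other. The named-lock pattern with oslo.concurrency looks roughly like this (illustrative; the real logic is _fetch_image_if_missing in nova/virt/vmwareapi/vmops.py, which additionally takes the external semaphore seen in the log for cross-process safety):

    from oslo_concurrency import lockutils

    IMAGE_ID = '5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5'

    @lockutils.synchronized('[datastore2] devstack-image-cache_base/' + IMAGE_ID)
    def fetch_image_if_missing():
        # Only one thread of this process may hold the lock; others
        # block, which is what the Acquiring/Acquired/Releasing lock
        # entries above record.
        pass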
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.098547] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.098803] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.099020] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.708559] env[61440]: DEBUG nova.compute.manager [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Received event network-changed-a6ae5c30-cde0-434d-b598-ba6cc654b276 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1496.708928] env[61440]: DEBUG nova.compute.manager [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Refreshing instance network info cache due to event network-changed-a6ae5c30-cde0-434d-b598-ba6cc654b276. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1496.709015] env[61440]: DEBUG oslo_concurrency.lockutils [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] Acquiring lock "refresh_cache-ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.709150] env[61440]: DEBUG oslo_concurrency.lockutils [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] Acquired lock "refresh_cache-ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.709294] env[61440]: DEBUG nova.network.neutron [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Refreshing network info cache for port a6ae5c30-cde0-434d-b598-ba6cc654b276 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1497.479343] env[61440]: DEBUG nova.network.neutron [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Updated VIF entry in instance network info cache for port a6ae5c30-cde0-434d-b598-ba6cc654b276. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1497.479679] env[61440]: DEBUG nova.network.neutron [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Updating instance_info_cache with network_info: [{"id": "a6ae5c30-cde0-434d-b598-ba6cc654b276", "address": "fa:16:3e:0a:42:d4", "network": {"id": "d5127eee-6df8-4d15-ab3a-d2702fa2d058", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1444475798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2851ef119a794c5993d1d8ff98eaf249", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c24464bb-bb6b-43a2-bdcd-8086ad1a307f", "external-id": "nsx-vlan-transportzone-781", "segmentation_id": 781, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6ae5c30-cd", "ovs_interfaceid": "a6ae5c30-cde0-434d-b598-ba6cc654b276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.489624] env[61440]: DEBUG oslo_concurrency.lockutils [req-9bb001d0-f963-4a93-88ce-48f39864f490 req-cf51a22c-d2da-478c-a2b3-68a10c29b7a8 service nova] Releasing lock "refresh_cache-ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.992019] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.992450] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.763310] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a2e324a-cd30-4ecf-8eea-cfaab3aa43c6 tempest-ServersNegativeTestMultiTenantJSON-63877719 tempest-ServersNegativeTestMultiTenantJSON-63877719-project-member] Acquiring lock "ee5f4d65-3264-451a-9e9e-8a7e47b1b527" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.763642] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a2e324a-cd30-4ecf-8eea-cfaab3aa43c6 tempest-ServersNegativeTestMultiTenantJSON-63877719 
tempest-ServersNegativeTestMultiTenantJSON-63877719-project-member] Lock "ee5f4d65-3264-451a-9e9e-8a7e47b1b527" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.275051] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.275360] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.275360] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1526.275196] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.275463] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1526.275505] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1526.298212] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.298373] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.298501] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.298627] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.298810] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.298972] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.299115] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.299237] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.299973] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.299973] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1526.299973] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
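All of the ComputeManager._poll_* and _heal_instance_info_cache entries in this stretch come from oslo.service's periodic task machinery: decorated methods are run on a timer, and the heal task here ends with "Didn't find any instances" because every candidate is still Building. The wiring, reduced to a runnable skeleton (the spacing value and instance data are illustrative, not Nova's):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ComputeManagerSkeleton(periodic_task.PeriodicTasks):
        def __init__(self, instances):
            super().__init__(cfg.CONF)
            self._instances = instances   # list of (uuid, vm_state) pairs

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _heal_instance_info_cache(self, context):
            for uuid, vm_state in self._instances:
                if vm_state == 'building':
                    # -> "Skipping network cache update ... it is Building."
                    continue
                print('would refresh network info cache for', uuid)

    mgr = ComputeManagerSkeleton(
        [('ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0', 'building')])
    mgr.run_periodic_tasks(context=None)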
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1527.274497] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.275472] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.275156] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.275395] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.287347] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.287718] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.287718] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.287878] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1530.289015] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35422c7-6901-40b2-8475-f4e07cfa1b8a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.297646] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414770c2-e827-4f08-a1f4-a366a2fd83d6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.312462] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04b3177-32a3-43b8-ad84-70896df862f0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.318574] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1910b1e7-5241-4674-b41a-cbbd88795098 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.347499] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180657MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1530.347672] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.347836] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.423037] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423214] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423346] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423470] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423593] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
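The audit entries above and below sort every placement allocation against this node into buckets: allocations for instances the host actively manages are kept, and allocations for instances that were merely scheduled here and have yet to start are skipped rather than healed. Schematically (assumed input shapes, not the tracker's real data structures in _remove_deleted_instances_allocations):

    def audit_allocations(allocations, managed, scheduled):
        # allocations: {instance_uuid: resources}; managed/scheduled: sets.
        for uuid, resources in allocations.items():
            if uuid in managed:
                print(uuid, 'actively managed here, allocation kept:', resources)
            elif uuid in scheduled:
                print(uuid, 'scheduled but not started, skipping heal')
            else:
                print(uuid, 'matches no known instance, candidate for removal')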
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423712] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423831] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.423947] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.424076] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.424196] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.435040] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.448880] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.458404] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 41d7967c-65be-4198-936e-1137afa763dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.468720] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9d6c9151-6d22-41fe-8f69-fd17758a20b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.478367] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.488011] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.497740] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ee5f4d65-3264-451a-9e9e-8a7e47b1b527 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
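The "Final resource view" entry just below follows from simple arithmetic over the claims audited above: ten actively managed m1.nano instances at 1 VCPU / 128 MB RAM / 1 GB disk each, plus the host's 512 MB memory reservation. As a quick check:

    instances = 10
    used_vcpus  = instances * 1            # 10   -> "total allocated vcpus: 10"
    used_ram_mb = 512 + instances * 128    # 1792 -> "used_ram=1792MB"
    used_disk_gb = instances * 1           # 10   -> "used_disk=10GB"
    print(used_vcpus, used_ram_mb, used_disk_gb)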
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.498083] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1530.498229] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1530.714448] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25988955-a9f6-435b-b1b8-bc84137b5a2a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.721911] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9772e8-b140-4555-a9c3-0a0ed3d1bc86 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.751771] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d05392-02be-41ee-a860-08c4d3332d43 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.758233] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777ddaf0-d78c-40b9-b8d9-91728990da0e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.770647] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.778818] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1530.794115] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1530.794222] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.446s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.789525] env[61440]: DEBUG oslo_service.periodic_task [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.789829] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1540.449075] env[61440]: WARNING oslo_vmware.rw_handles [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1540.449075] env[61440]: ERROR oslo_vmware.rw_handles [ 1540.449738] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1540.451466] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1540.451719] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Copying Virtual Disk [datastore2] vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/18ccd9a0-315f-4de8-86f6-c404a789b803/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1540.452009] 
env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3958405e-8365-475a-8eb7-92c532b79381 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.460408] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1540.460408] env[61440]: value = "task-4281352" [ 1540.460408] env[61440]: _type = "Task" [ 1540.460408] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.468388] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': task-4281352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.971568] env[61440]: DEBUG oslo_vmware.exceptions [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1540.971836] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.972696] env[61440]: ERROR nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1540.972696] env[61440]: Faults: ['InvalidArgument'] [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Traceback (most recent call last): [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] yield resources [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self.driver.spawn(context, instance, image_meta, [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 
16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self._fetch_image_if_missing(context, vi) [ 1540.972696] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] image_cache(vi, tmp_image_ds_loc) [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] vm_util.copy_virtual_disk( [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] session._wait_for_task(vmdk_copy_task) [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] return self.wait_for_task(task_ref) [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] return evt.wait() [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] result = hub.switch() [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1540.973121] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] return self.greenlet.switch() [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self.f(*self.args, **self.kw) [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] raise exceptions.translate_fault(task_info.error) [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Faults: 
['InvalidArgument'] [ 1540.973481] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] [ 1540.973481] env[61440]: INFO nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Terminating instance [ 1540.974252] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.974459] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1540.974699] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c5a697a-554b-4a98-aae3-63ab85025e52 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.977041] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1540.977236] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1540.977946] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f835ea-5d01-43f4-91b9-d348aeb2688c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.984648] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1540.984854] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-589a508c-4eef-4b92-8bc8-2610a97db673 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.986889] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1540.987072] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1540.987979] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c26100a0-6a72-4001-9f4f-333bf1452140 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.992577] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1540.992577] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526e3329-8efd-477d-218a-d901158cd090" [ 1540.992577] env[61440]: _type = "Task" [ 1540.992577] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.002030] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526e3329-8efd-477d-218a-d901158cd090, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.053929] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1541.054163] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1541.054347] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Deleting the datastore file [datastore2] 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1541.054614] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb02ac57-f97a-4d62-8ce5-086c98e2429c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.062854] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1541.062854] env[61440]: value = "task-4281354" [ 1541.062854] env[61440]: _type = "Task" [ 1541.062854] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.069160] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': task-4281354, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.502768] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1541.503091] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating directory with path [datastore2] vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1541.503283] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c9a0254-2694-448f-a8c3-2423a0e36ea1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.516194] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Created directory with path [datastore2] vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1541.516393] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Fetch image to [datastore2] vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1541.516567] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1541.517336] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42cc2bd-f5dc-4fb3-a72b-a6623eefaa45 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.523534] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4c7368-859b-4c12-b63c-725f2539b139 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.532205] 
env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047d6d41-ebda-4e38-b5a2-683bdf3780ce {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.561432] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c511b31-5a29-4f57-9a15-01ef1083ba11 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.570513] env[61440]: DEBUG oslo_vmware.api [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': task-4281354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082843} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.571918] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1541.572154] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1541.572367] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1541.572549] env[61440]: INFO nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Took 0.60 seconds to destroy the instance on the hypervisor. 
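
The fault above surfaces through oslo.vmware's task polling: copy_virtual_disk submits a disk-copy task to vCenter, _poll_task re-reads the TaskInfo on every looping-call tick, and an error state is translated into a VimFaultException carrying the localized message ("A specified parameter was not correct: fileType") and the fault list (['InvalidArgument']). Below is a minimal Python sketch of that poll-and-translate pattern; the TaskInfo shape and all names are illustrative stand-ins, not the actual oslo.vmware internals.

import time
from types import SimpleNamespace


class VimFaultException(Exception):
    """Carries the vCenter message plus the list of fault ids."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(poll_fn, interval=0.5):
    """Poll until the task leaves the running state.

    poll_fn() is assumed to return an object with .state, .result and
    .error attributes, loosely mirroring vCenter's TaskInfo.
    """
    while True:
        info = poll_fn()
        if info.state == "success":
            return info.result
        if info.state == "error":
            # The point where "A specified parameter was not correct:
            # fileType" / Faults: ['InvalidArgument'] is raised above.
            raise VimFaultException(info.error.fault_ids,
                                    info.error.localized_message)
        time.sleep(interval)


# Stubbed TaskInfo reproducing the failure mode seen in this log.
failed = SimpleNamespace(
    state="error", result=None,
    error=SimpleNamespace(
        fault_ids=["InvalidArgument"],
        localized_message="A specified parameter was not correct: fileType"))
try:
    wait_for_task(lambda: failed)
except VimFaultException as exc:
    print(exc, exc.fault_list)
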
[ 1541.574328] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a18c39ea-66cd-46b9-9a9f-2d5146978543 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.576231] env[61440]: DEBUG nova.compute.claims [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1541.576404] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.576613] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.599134] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1541.767493] env[61440]: DEBUG oslo_vmware.rw_handles [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1541.827913] env[61440]: DEBUG oslo_vmware.rw_handles [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1541.828119] env[61440]: DEBUG oslo_vmware.rw_handles [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1541.877325] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6127a2-d96a-4bac-94e4-f091a61b8b28 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.885034] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ed1269-0a00-44f7-9203-e723e64c3b74 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.915138] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc167ea-5bd8-4dce-ba07-ed46eddf0768 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.922811] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7eece79-18cc-4a6a-8e09-3e229c18f5bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.936020] env[61440]: DEBUG nova.compute.provider_tree [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.945252] env[61440]: DEBUG nova.scheduler.client.report [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1541.959486] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.960039] env[61440]: ERROR nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1541.960039] env[61440]: Faults: ['InvalidArgument'] [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Traceback (most recent call last): [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self.driver.spawn(context, instance, image_meta, [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self._fetch_image_if_missing(context, vi) [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] image_cache(vi, tmp_image_ds_loc) [ 1541.960039] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] vm_util.copy_virtual_disk( [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] session._wait_for_task(vmdk_copy_task) [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] return self.wait_for_task(task_ref) [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] return evt.wait() [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] result = hub.switch() [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] return self.greenlet.switch() [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1541.960380] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] self.f(*self.args, **self.kw) [ 1541.960695] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1541.960695] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] raise exceptions.translate_fault(task_info.error) [ 1541.960695] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1541.960695] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Faults: ['InvalidArgument'] [ 1541.960695] env[61440]: ERROR nova.compute.manager [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] [ 1541.960826] env[61440]: DEBUG nova.compute.utils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1541.962218] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Build of instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f was re-scheduled: A specified parameter was not correct: fileType [ 1541.962218] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1541.962614] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1541.962788] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1541.962956] env[61440]: DEBUG nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1541.963147] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1542.104765] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.348772] env[61440]: DEBUG nova.network.neutron [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.363425] env[61440]: INFO nova.compute.manager [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Took 0.40 seconds to deallocate network for instance. [ 1542.458328] env[61440]: INFO nova.scheduler.client.report [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Deleted allocations for instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f [ 1542.477659] env[61440]: DEBUG oslo_concurrency.lockutils [None req-59d5ddba-f2db-4cd2-8f03-50b875163961 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 638.196s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.478789] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 396.849s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.478985] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] During sync_power_state the instance has a pending task (spawning). Skip. 
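
The "Acquiring lock ... by ...", "Lock ... acquired ... waited", and "Lock ... released ... held" triplets throughout this section (including the 638.196s hold on the instance-UUID lock and the 396.849s wait by _sync_power_states above) are emitted by oslo.concurrency's lockutils wrappers around each critical section. A minimal usage sketch, assuming only that oslo.concurrency is installed; the decorated function body is hypothetical:

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def abort_instance_claim():
    # Held only while the resource tracker reverts the usage the failed
    # instance had claimed (cf. the 0.383s "held" duration above).
    pass


abort_instance_claim()
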
[ 1542.479177] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.479774] env[61440]: DEBUG oslo_concurrency.lockutils [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 243.807s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.479991] env[61440]: DEBUG oslo_concurrency.lockutils [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.480208] env[61440]: DEBUG oslo_concurrency.lockutils [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.480376] env[61440]: DEBUG oslo_concurrency.lockutils [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.482225] env[61440]: INFO nova.compute.manager [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Terminating instance [ 1542.483961] env[61440]: DEBUG nova.compute.manager [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1542.484169] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1542.484452] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-876e5061-eb05-409a-a74d-f967620d034e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.490304] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1542.496825] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad3c09f-19d1-46ce-ab34-98e2490135c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.526473] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f could not be found. [ 1542.526968] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1542.526968] env[61440]: INFO nova.compute.manager [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1542.527142] env[61440]: DEBUG oslo.service.loopingcall [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1542.527824] env[61440]: DEBUG nova.compute.manager [-] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1542.527824] env[61440]: DEBUG nova.network.neutron [-] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1542.544687] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.544930] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.546436] env[61440]: INFO nova.compute.claims [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1542.565125] env[61440]: DEBUG nova.network.neutron [-] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.577246] env[61440]: INFO nova.compute.manager [-] [instance: 16eeb135-f151-4b5e-a6eb-9e5fd0393d1f] Took 0.05 seconds to deallocate network for instance. 
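
The "Inventory has not changed" pair above records the report client short-circuiting: the inventory computed from the host is compared with what the ProviderTree already caches for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, and the placement update is skipped when they match. A simplified sketch of that comparison using the inventory data from the log; inventory_changed is a hypothetical helper, not Nova's API:

# Inventory as reported in the log for this compute node.
current = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1,
                  "allocation_ratio": 1.0},
    "DISK_GB": {"total": 329, "reserved": 0, "min_unit": 1,
                "max_unit": 180, "step_size": 1, "allocation_ratio": 1.0},
}


def inventory_changed(cached, computed):
    # Both sides key on resource classes with identical field names, so
    # a plain dict comparison decides whether placement needs an update.
    return cached != computed


if not inventory_changed(current, dict(current)):
    print("Inventory has not changed; skipping placement update")
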
[ 1542.677731] env[61440]: DEBUG oslo_concurrency.lockutils [None req-42e54c8c-b8c1-494d-89b7-71bbdb180326 tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "16eeb135-f151-4b5e-a6eb-9e5fd0393d1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.823833] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51225a88-8408-46e7-8185-ff94cd2795a5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.830988] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd6e1d4-8ad9-4f5c-af79-e0dac7b3c5cd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.860185] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c5d2a7-20d5-435c-b477-4f9da5f2db10 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.866965] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc67e45-4b6b-400a-a1c5-258db0c6ce04 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.879521] env[61440]: DEBUG nova.compute.provider_tree [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1542.889224] env[61440]: DEBUG nova.scheduler.client.report [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1542.905427] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.360s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.905880] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Start building networks asynchronously for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1542.945938] env[61440]: DEBUG nova.compute.utils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1542.947102] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1542.947102] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1542.957973] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1543.016588] env[61440]: DEBUG nova.policy [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02aa958bbbdc49ab8d494fe9afc40779', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a38fb630f3e41acbcd97dee8e89aba8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1543.023032] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1543.050896] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.051287] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.051434] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.051619] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.051766] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.051914] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.052151] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.052342] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.052551] env[61440]: DEBUG 
nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.052723] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.052896] env[61440]: DEBUG nova.virt.hardware [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.053923] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3605f59-f327-48a6-9e0e-a90361cfa6c5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.065147] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec96c63-0072-4756-bb28-6b748fb8f0cc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.399814] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Successfully created port: 97f549b5-efa3-4e36-9bce-9d2431477c6a {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1544.689697] env[61440]: DEBUG nova.compute.manager [req-ba1c7b84-62a7-4020-b048-59d97db9ef41 req-fc06eadd-1268-4adb-9595-84adc31684f3 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Received event network-vif-plugged-97f549b5-efa3-4e36-9bce-9d2431477c6a {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1544.689945] env[61440]: DEBUG oslo_concurrency.lockutils [req-ba1c7b84-62a7-4020-b048-59d97db9ef41 req-fc06eadd-1268-4adb-9595-84adc31684f3 service nova] Acquiring lock "c307f560-e474-441f-b099-53c2fd290488-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.690163] env[61440]: DEBUG oslo_concurrency.lockutils [req-ba1c7b84-62a7-4020-b048-59d97db9ef41 req-fc06eadd-1268-4adb-9595-84adc31684f3 service nova] Lock "c307f560-e474-441f-b099-53c2fd290488-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.690323] env[61440]: DEBUG oslo_concurrency.lockutils [req-ba1c7b84-62a7-4020-b048-59d97db9ef41 req-fc06eadd-1268-4adb-9595-84adc31684f3 service nova] Lock "c307f560-e474-441f-b099-53c2fd290488-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.690493] env[61440]: DEBUG 
nova.compute.manager [req-ba1c7b84-62a7-4020-b048-59d97db9ef41 req-fc06eadd-1268-4adb-9595-84adc31684f3 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] No waiting events found dispatching network-vif-plugged-97f549b5-efa3-4e36-9bce-9d2431477c6a {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1544.690659] env[61440]: WARNING nova.compute.manager [req-ba1c7b84-62a7-4020-b048-59d97db9ef41 req-fc06eadd-1268-4adb-9595-84adc31684f3 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Received unexpected event network-vif-plugged-97f549b5-efa3-4e36-9bce-9d2431477c6a for instance with vm_state building and task_state spawning. [ 1544.794495] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Successfully updated port: 97f549b5-efa3-4e36-9bce-9d2431477c6a {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1544.806050] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "refresh_cache-c307f560-e474-441f-b099-53c2fd290488" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.806210] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "refresh_cache-c307f560-e474-441f-b099-53c2fd290488" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.806365] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1544.914498] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1545.152044] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Updating instance_info_cache with network_info: [{"id": "97f549b5-efa3-4e36-9bce-9d2431477c6a", "address": "fa:16:3e:c9:87:3b", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97f549b5-ef", "ovs_interfaceid": "97f549b5-efa3-4e36-9bce-9d2431477c6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.167400] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "refresh_cache-c307f560-e474-441f-b099-53c2fd290488" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.167780] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance network_info: |[{"id": "97f549b5-efa3-4e36-9bce-9d2431477c6a", "address": "fa:16:3e:c9:87:3b", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97f549b5-ef", "ovs_interfaceid": "97f549b5-efa3-4e36-9bce-9d2431477c6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1545.168215] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:87:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '97f549b5-efa3-4e36-9bce-9d2431477c6a', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.176410] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating folder: Project (1a38fb630f3e41acbcd97dee8e89aba8). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1545.176979] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9a0a343-964f-4614-9737-047217b0f089 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.189192] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Created folder: Project (1a38fb630f3e41acbcd97dee8e89aba8) in parent group-v843372. [ 1545.189448] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating folder: Instances. Parent ref: group-v843458. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1545.189707] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85fe70ca-8a97-4cae-bc6e-8701e0e72216 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.199927] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Created folder: Instances in parent group-v843458. [ 1545.200178] env[61440]: DEBUG oslo.service.loopingcall [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.200560] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c307f560-e474-441f-b099-53c2fd290488] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1545.200755] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7ce83c9-f8ed-40ce-bab3-1a43c17988d3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.220740] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.220740] env[61440]: value = "task-4281357" [ 1545.220740] env[61440]: _type = "Task" [ 1545.220740] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.228267] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281357, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.733877] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281357, 'name': CreateVM_Task, 'duration_secs': 0.27146} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.734276] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c307f560-e474-441f-b099-53c2fd290488] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1545.734815] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.735032] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.735581] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1545.735581] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68bc6238-f82e-4db0-b5e8-4f57e54dd188 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.740112] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 1545.740112] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ab057d-ce19-5237-671f-286d77fc20f7" [ 1545.740112] env[61440]: _type = "Task" [ 1545.740112] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.748897] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ab057d-ce19-5237-671f-286d77fc20f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.250786] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.250920] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.251124] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.751015] env[61440]: DEBUG nova.compute.manager [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Received event network-changed-97f549b5-efa3-4e36-9bce-9d2431477c6a {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1546.751319] env[61440]: DEBUG nova.compute.manager [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Refreshing instance network info cache due to event network-changed-97f549b5-efa3-4e36-9bce-9d2431477c6a. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1546.751738] env[61440]: DEBUG oslo_concurrency.lockutils [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] Acquiring lock "refresh_cache-c307f560-e474-441f-b099-53c2fd290488" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.751923] env[61440]: DEBUG oslo_concurrency.lockutils [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] Acquired lock "refresh_cache-c307f560-e474-441f-b099-53c2fd290488" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.752105] env[61440]: DEBUG nova.network.neutron [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Refreshing network info cache for port 97f549b5-efa3-4e36-9bce-9d2431477c6a {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1547.190350] env[61440]: DEBUG nova.network.neutron [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Updated VIF entry in instance network info cache for port 97f549b5-efa3-4e36-9bce-9d2431477c6a. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1547.190350] env[61440]: DEBUG nova.network.neutron [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] [instance: c307f560-e474-441f-b099-53c2fd290488] Updating instance_info_cache with network_info: [{"id": "97f549b5-efa3-4e36-9bce-9d2431477c6a", "address": "fa:16:3e:c9:87:3b", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97f549b5-ef", "ovs_interfaceid": "97f549b5-efa3-4e36-9bce-9d2431477c6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.201814] env[61440]: DEBUG oslo_concurrency.lockutils [req-5357e86f-cde6-49c3-8564-083274392173 req-2b8a0d2b-3257-4273-a51e-908b0ad08e16 service nova] Releasing lock "refresh_cache-c307f560-e474-441f-b099-53c2fd290488" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.557083] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] 
Acquiring lock "f99f2c72-3158-46db-b21b-7f0066539252" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.557364] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "f99f2c72-3158-46db-b21b-7f0066539252" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.904246] env[61440]: DEBUG oslo_concurrency.lockutils [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "c307f560-e474-441f-b099-53c2fd290488" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.274237] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1583.274593] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1583.274637] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1587.276057] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1587.276057] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1587.276057] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1587.299910] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300092] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300229] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300387] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300533] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300680] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300807] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.300932] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.301061] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.301184] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1587.301320] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1587.301797] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.269204] env[61440]: WARNING oslo_vmware.rw_handles [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1589.269204] env[61440]: ERROR oslo_vmware.rw_handles [ 1589.269856] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1589.271498] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1589.271747] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Copying Virtual Disk [datastore2] vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/fbb559b0-b9cf-4399-9a1b-8381d8d6bab4/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1589.272207] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f19a139e-5bbe-4f98-8e8b-b8b5010d8dcb 
{{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.275341] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.280509] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1589.280509] env[61440]: value = "task-4281358" [ 1589.280509] env[61440]: _type = "Task" [ 1589.280509] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.288077] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': task-4281358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.790457] env[61440]: DEBUG oslo_vmware.exceptions [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1589.790720] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.791272] env[61440]: ERROR nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1589.791272] env[61440]: Faults: ['InvalidArgument'] [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] Traceback (most recent call last): [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] yield resources [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self.driver.spawn(context, instance, image_meta, [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self._fetch_image_if_missing(context, vi) [ 1589.791272] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] image_cache(vi, tmp_image_ds_loc) [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] vm_util.copy_virtual_disk( [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] session._wait_for_task(vmdk_copy_task) [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] return self.wait_for_task(task_ref) [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] return evt.wait() [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] result = hub.switch() [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1589.791632] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] return self.greenlet.switch() [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self.f(*self.args, **self.kw) [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] raise exceptions.translate_fault(task_info.error) [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not 
correct: fileType [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] Faults: ['InvalidArgument'] [ 1589.792065] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] [ 1589.792065] env[61440]: INFO nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Terminating instance [ 1589.793277] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.793557] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1589.793854] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83d6fca0-736c-4fc5-9f29-57bf31b82a29 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.796559] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1589.796822] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1589.797629] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27697d4c-10ce-453f-874a-b86606761bf7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.804314] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1589.804536] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-347d2b5d-d90b-469c-a847-83c3d15123a0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.806604] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1589.806777] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1589.807763] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eaf0ce1-6fa2-4ee0-b5da-0e46a6f6e8fc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.812185] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1589.812185] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520c57f2-77c6-93ef-1875-84f01926a983" [ 1589.812185] env[61440]: _type = "Task" [ 1589.812185] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.825135] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520c57f2-77c6-93ef-1875-84f01926a983, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.873723] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1589.873942] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1589.874138] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Deleting the datastore file [datastore2] faf90964-1814-459f-89ef-0a27808077c1 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1589.874395] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd82249f-91d2-4636-947b-8e8fb72a1c70 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.880427] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for the task: (returnval){ [ 1589.880427] env[61440]: value = "task-4281360" [ 1589.880427] env[61440]: _type = "Task" [ 1589.880427] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.887835] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': task-4281360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.274501] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.287020] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.287020] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.287157] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.287273] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1590.288357] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39932c6-63f0-49f2-8fdb-defa771da26a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.296889] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702ec2dd-24d9-462d-a346-230cc1a9be5a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.310207] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33dd162-89db-4f0d-8fd9-cf4cdac5a6c7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.317697] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3130e614-c1cd-451a-9f9d-06a7b5332325 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.324166] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1590.324431] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating directory with path [datastore2] 
vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1590.324638] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57fcaeeb-ebb0-41b2-96ed-c937e54ad6fd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.350695] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180682MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1590.350834] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.351032] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.361586] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Created directory with path [datastore2] vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1590.361778] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Fetch image to [datastore2] vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1590.361979] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1590.362715] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f55fa8-a41b-41c0-adeb-0ba477acb7ac {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.368937] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570f3f2e-1312-4ca8-b9a5-ce6aa326f936 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.377642] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e63d84-4552-4395-9dff-3a83037656fc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.389251] env[61440]: DEBUG oslo_vmware.api [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Task: {'id': task-4281360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066827} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.416779] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1590.416951] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1590.417116] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1590.417304] env[61440]: INFO nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Took 0.62 seconds to destroy the instance on the hypervisor. 
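The task lifecycle visible throughout this log (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task: "Waiting for the task", "progress is 0%.", "completed successfully") is driven by a simple poll loop. Below is a minimal sketch of that pattern, assuming a hypothetical get_task_info() callable in place of the real vSphere property reads; it is an illustration of the polling behavior seen in these entries, not oslo.vmware's actual implementation.

    import time

    class TaskFault(Exception):
        """Raised when a polled task ends in an error state."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task reports success or error; the
        # "progress is N%." lines in the log come from a loop like this.
        while True:
            info = get_task_info()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # A fault here is what surfaces as VimFaultException below.
                raise TaskFault(info["error"])
            # Still queued/running: report progress and retry.
            print("Task %s progress is %s%%." % (info["id"], info.get("progress", 0)))
            time.sleep(interval)

The 'duration_secs' values recorded on completion (for example 0.27146 for task-4281357 and 0.066827 for task-4281360) are simply the elapsed time measured across such a loop.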
[ 1590.422810] env[61440]: DEBUG nova.compute.claims [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1590.422998] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.423729] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5adf8e9-c8d0-4412-a63b-d6747865015a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.429842] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-18139644-9991-494d-85c4-ca576830b99c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.448012] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance faf90964-1814-459f-89ef-0a27808077c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448194] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 726c5ed6-d706-4886-a2c1-fc666a527662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448325] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448448] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448566] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448684] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448830] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.448955] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.449086] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.449306] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.452066] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1590.465521] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.476697] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 41d7967c-65be-4198-936e-1137afa763dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.487419] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9d6c9151-6d22-41fe-8f69-fd17758a20b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.497794] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.508057] env[61440]: DEBUG oslo_vmware.rw_handles [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1590.510961] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.584790] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ee5f4d65-3264-451a-9e9e-8a7e47b1b527 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.589867] env[61440]: DEBUG oslo_vmware.rw_handles [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1590.590077] env[61440]: DEBUG oslo_vmware.rw_handles [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1590.596778] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.597087] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1590.597177] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1590.790598] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7789744-47a4-4ffe-9a96-d6a26f4bb669 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.797776] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3eda14-e335-4b1e-892b-2c4a90589014 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.827337] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f86a940-f9ab-4366-895a-6651cc46ee48 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.834141] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea2ad74-ba47-41ba-8b64-4193aaf954a9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.846505] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.855805] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1590.868910] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1590.869101] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.518s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.869352] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.446s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.108062] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6692f7f6-3ac3-444a-816a-8c1dcd58fe8a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.115454] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4efba6-dc64-40be-99b0-12b735cef3f4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.146025] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02fc7f5-0f21-4001-a7dd-813db1e06877 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.153142] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19be21c4-bfb2-483e-89ce-bdae020c1260 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.166952] env[61440]: DEBUG nova.compute.provider_tree [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1591.176763] env[61440]: DEBUG nova.scheduler.client.report [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1591.193749] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.193877] env[61440]: ERROR nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1591.193877] env[61440]: Faults: ['InvalidArgument'] [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] Traceback (most recent call last): [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self.driver.spawn(context, instance, image_meta, [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self._fetch_image_if_missing(context, vi) [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] image_cache(vi, tmp_image_ds_loc) [ 1591.193877] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] vm_util.copy_virtual_disk( [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] session._wait_for_task(vmdk_copy_task) [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] return self.wait_for_task(task_ref) [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] return evt.wait() [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1591.194244] env[61440]: ERROR nova.compute.manager 
[instance: faf90964-1814-459f-89ef-0a27808077c1] result = hub.switch() [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] return self.greenlet.switch() [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1591.194244] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] self.f(*self.args, **self.kw) [ 1591.194559] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1591.194559] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] raise exceptions.translate_fault(task_info.error) [ 1591.194559] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1591.194559] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] Faults: ['InvalidArgument'] [ 1591.194559] env[61440]: ERROR nova.compute.manager [instance: faf90964-1814-459f-89ef-0a27808077c1] [ 1591.194559] env[61440]: DEBUG nova.compute.utils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1591.197428] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Build of instance faf90964-1814-459f-89ef-0a27808077c1 was re-scheduled: A specified parameter was not correct: fileType [ 1591.197428] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1591.197834] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1591.198027] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1591.198215] env[61440]: DEBUG nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1591.198383] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1591.624486] env[61440]: DEBUG nova.network.neutron [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.638926] env[61440]: INFO nova.compute.manager [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Took 0.44 seconds to deallocate network for instance. [ 1591.759731] env[61440]: INFO nova.scheduler.client.report [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Deleted allocations for instance faf90964-1814-459f-89ef-0a27808077c1 [ 1591.780255] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9f7fe85f-270e-4da0-9c91-e676fadc6a2a tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "faf90964-1814-459f-89ef-0a27808077c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 686.800s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.781438] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "faf90964-1814-459f-89ef-0a27808077c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 489.345s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.781578] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Acquiring lock "faf90964-1814-459f-89ef-0a27808077c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.782042] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "faf90964-1814-459f-89ef-0a27808077c1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.782042] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "faf90964-1814-459f-89ef-0a27808077c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.787227] env[61440]: INFO nova.compute.manager [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Terminating instance [ 1591.789136] env[61440]: DEBUG nova.compute.manager [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1591.789887] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1591.789887] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b16895f-b76c-4b18-8409-afb13ad71e06 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.799626] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93722c5-ba60-47d5-9f4a-0bbe93b43bbc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.810007] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1591.836963] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance faf90964-1814-459f-89ef-0a27808077c1 could not be found. 
[ 1591.837186] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1591.837363] env[61440]: INFO nova.compute.manager [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] [instance: faf90964-1814-459f-89ef-0a27808077c1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1591.837604] env[61440]: DEBUG oslo.service.loopingcall [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.837833] env[61440]: DEBUG nova.compute.manager [-] [instance: faf90964-1814-459f-89ef-0a27808077c1] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1591.837926] env[61440]: DEBUG nova.network.neutron [-] [instance: faf90964-1814-459f-89ef-0a27808077c1] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1591.869028] env[61440]: DEBUG nova.network.neutron [-] [instance: faf90964-1814-459f-89ef-0a27808077c1] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.870653] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.870891] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.872406] env[61440]: INFO nova.compute.claims [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1591.875215] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.876927] env[61440]: INFO nova.compute.manager [-] [instance: faf90964-1814-459f-89ef-0a27808077c1] Took 0.04 seconds to deallocate network for instance. 
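The "Claim successful on node domain-c8..." entry above is the ResourceTracker accepting 53a5db32-d312-488e-8193-df4504736fc7 (an m1.nano: 1 vCPU, 128 MB RAM, 1 GB disk, per the flavor dump later in this log) while holding the "compute_resources" lock. A back-of-the-envelope check against the inventory reported for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, using Placement's capacity formula capacity = (total - reserved) * allocation_ratio; note the real claim also accounts for current usage (10 of 48 vCPUs already allocated above), which this sketch omits:

    # Inventory figures copied from the 'Inventory has not changed' entries above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    request = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}   # m1.nano footprint

    for rc, wanted in request.items():
        inv = inventory[rc]
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        assert wanted <= capacity, f'{rc} claim would fail'
        print(f'{rc}: {wanted} of {capacity:g} -> fits')
    # VCPU: 1 of 192; MEMORY_MB: 128 of 196078; DISK_GB: 1 of 329 -> claim fits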
[ 1591.960822] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b7a1e7af-e63b-4745-9caf-cb978be58c7d tempest-ListServerFiltersTestJSON-1300390397 tempest-ListServerFiltersTestJSON-1300390397-project-member] Lock "faf90964-1814-459f-89ef-0a27808077c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.961305] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "faf90964-1814-459f-89ef-0a27808077c1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 446.331s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.961389] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: faf90964-1814-459f-89ef-0a27808077c1] During sync_power_state the instance has a pending task (deleting). Skip. [ 1591.961508] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "faf90964-1814-459f-89ef-0a27808077c1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.101346] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92fc3de-7a9f-4879-b8ca-eea60d769a3f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.109679] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390b867c-0f90-4710-98c0-d888238e0323 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.142590] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70731a3-e98a-4c65-a5aa-d79df81c3fed {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.151036] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514ae3cc-a495-43de-9f4c-9515c4a7e574 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.162126] env[61440]: DEBUG oslo_concurrency.lockutils [None req-38ce6206-58c6-4870-8684-4eef8ad8f51a tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "4ab24b15-4808-4a3c-81d3-a1282e633cf8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.162525] env[61440]: DEBUG oslo_concurrency.lockutils [None req-38ce6206-58c6-4870-8684-4eef8ad8f51a tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "4ab24b15-4808-4a3c-81d3-a1282e633cf8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1592.173276] env[61440]: DEBUG nova.compute.provider_tree [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.182720] env[61440]: DEBUG nova.scheduler.client.report [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1592.197584] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.198086] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1592.230380] env[61440]: DEBUG nova.compute.utils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1592.231673] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1592.232288] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1592.241114] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1592.306373] env[61440]: DEBUG nova.policy [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f35f4b3a2d094a93b9f8f65d766ebd4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4a61e96a8d34f76be5f32a3f9dff73b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1592.309692] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1592.336397] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1592.336905] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1592.337418] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1592.340960] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1592.340960] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1592.340960] env[61440]: DEBUG nova.virt.hardware [None 
req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1592.340960] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1592.340960] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1592.341347] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1592.341347] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1592.341347] env[61440]: DEBUG nova.virt.hardware [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1592.341347] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e617835-5640-49bf-9a4b-cc07d172a89b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.349030] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dc0554-c090-41e0-a68d-8ae65e071ef4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.714206] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Successfully created port: c872fb92-1874-4865-92dd-1e81809560c8 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1593.274908] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.436328] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 
53a5db32-d312-488e-8193-df4504736fc7] Successfully updated port: c872fb92-1874-4865-92dd-1e81809560c8 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1593.453166] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "refresh_cache-53a5db32-d312-488e-8193-df4504736fc7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.453325] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired lock "refresh_cache-53a5db32-d312-488e-8193-df4504736fc7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.453548] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1593.522216] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1593.664168] env[61440]: DEBUG nova.compute.manager [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Received event network-vif-plugged-c872fb92-1874-4865-92dd-1e81809560c8 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1593.664401] env[61440]: DEBUG oslo_concurrency.lockutils [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] Acquiring lock "53a5db32-d312-488e-8193-df4504736fc7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.664606] env[61440]: DEBUG oslo_concurrency.lockutils [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] Lock "53a5db32-d312-488e-8193-df4504736fc7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.664945] env[61440]: DEBUG oslo_concurrency.lockutils [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] Lock "53a5db32-d312-488e-8193-df4504736fc7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.665125] env[61440]: DEBUG nova.compute.manager [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] No waiting events found dispatching network-vif-plugged-c872fb92-1874-4865-92dd-1e81809560c8 
{{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1593.665295] env[61440]: WARNING nova.compute.manager [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Received unexpected event network-vif-plugged-c872fb92-1874-4865-92dd-1e81809560c8 for instance with vm_state building and task_state spawning. [ 1593.665464] env[61440]: DEBUG nova.compute.manager [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Received event network-changed-c872fb92-1874-4865-92dd-1e81809560c8 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1593.665623] env[61440]: DEBUG nova.compute.manager [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Refreshing instance network info cache due to event network-changed-c872fb92-1874-4865-92dd-1e81809560c8. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1593.665969] env[61440]: DEBUG oslo_concurrency.lockutils [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] Acquiring lock "refresh_cache-53a5db32-d312-488e-8193-df4504736fc7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.705345] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Updating instance_info_cache with network_info: [{"id": "c872fb92-1874-4865-92dd-1e81809560c8", "address": "fa:16:3e:15:f2:7b", "network": {"id": "1224c4d7-dd39-47ab-8325-0c20b78083bc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1689851426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4a61e96a8d34f76be5f32a3f9dff73b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc872fb92-18", "ovs_interfaceid": "c872fb92-1874-4865-92dd-1e81809560c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.718545] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Releasing lock "refresh_cache-53a5db32-d312-488e-8193-df4504736fc7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.718843] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 
tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance network_info: |[{"id": "c872fb92-1874-4865-92dd-1e81809560c8", "address": "fa:16:3e:15:f2:7b", "network": {"id": "1224c4d7-dd39-47ab-8325-0c20b78083bc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1689851426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4a61e96a8d34f76be5f32a3f9dff73b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc872fb92-18", "ovs_interfaceid": "c872fb92-1874-4865-92dd-1e81809560c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1593.718952] env[61440]: DEBUG oslo_concurrency.lockutils [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] Acquired lock "refresh_cache-53a5db32-d312-488e-8193-df4504736fc7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.719157] env[61440]: DEBUG nova.network.neutron [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Refreshing network info cache for port c872fb92-1874-4865-92dd-1e81809560c8 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1593.720236] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:f2:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c3e2368-4a35-4aa5-9135-23daedbbf9ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c872fb92-1874-4865-92dd-1e81809560c8', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1593.727821] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating folder: Project (d4a61e96a8d34f76be5f32a3f9dff73b). Parent ref: group-v843372. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1593.730691] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eea8f49c-cfc3-4705-bf4d-821b600c6931 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.748382] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Created folder: Project (d4a61e96a8d34f76be5f32a3f9dff73b) in parent group-v843372. [ 1593.748611] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating folder: Instances. Parent ref: group-v843461. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1593.748844] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fc49ae7-7f7a-4d1d-9881-257b589c9aae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.757326] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Created folder: Instances in parent group-v843461. [ 1593.757554] env[61440]: DEBUG oslo.service.loopingcall [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.757734] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1593.757929] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8ca651b-bd33-4299-a6fa-eb1b2924af08 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.780339] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1593.780339] env[61440]: value = "task-4281363" [ 1593.780339] env[61440]: _type = "Task" [ 1593.780339] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.791442] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281363, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.996800] env[61440]: DEBUG nova.network.neutron [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Updated VIF entry in instance network info cache for port c872fb92-1874-4865-92dd-1e81809560c8. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1593.997198] env[61440]: DEBUG nova.network.neutron [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Updating instance_info_cache with network_info: [{"id": "c872fb92-1874-4865-92dd-1e81809560c8", "address": "fa:16:3e:15:f2:7b", "network": {"id": "1224c4d7-dd39-47ab-8325-0c20b78083bc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1689851426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4a61e96a8d34f76be5f32a3f9dff73b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc872fb92-18", "ovs_interfaceid": "c872fb92-1874-4865-92dd-1e81809560c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.006986] env[61440]: DEBUG oslo_concurrency.lockutils [req-9859e728-62e8-496f-b76e-4c1f037b7d8d req-14eb4998-637d-43ad-b8c5-b759f8f91757 service nova] Releasing lock "refresh_cache-53a5db32-d312-488e-8193-df4504736fc7" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.270056] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.290019] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281363, 'name': CreateVM_Task, 'duration_secs': 0.28322} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.290197] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1594.290885] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.291066] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.291382] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1594.291625] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee6c3cd7-bab3-4d44-9315-d158f859bd7e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.295903] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for the task: (returnval){ [ 1594.295903] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]529d3978-7f2c-5ae0-adc7-4c14303d66af" [ 1594.295903] env[61440]: _type = "Task" [ 1594.295903] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.302821] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]529d3978-7f2c-5ae0-adc7-4c14303d66af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.805923] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.806257] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1594.806457] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.269353] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1603.471901] env[61440]: DEBUG oslo_concurrency.lockutils [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "53a5db32-d312-488e-8193-df4504736fc7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.464344] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.464580] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.149798] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2595f302-27a8-430d-afc7-77e021c724d2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Acquiring lock "a7e53bc4-8461-411a-9b45-66678b9bb31f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.150164] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-2595f302-27a8-430d-afc7-77e021c724d2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "a7e53bc4-8461-411a-9b45-66678b9bb31f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.286618] env[61440]: WARNING oslo_vmware.rw_handles [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1639.286618] env[61440]: ERROR oslo_vmware.rw_handles [ 1639.287310] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1639.289038] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1639.289305] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Copying Virtual Disk [datastore2] vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/3211f465-250f-4cad-bb64-fec7dd96f6e8/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1639.289595] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-fc3109f4-9c46-4b9b-bf54-a80a4535b398 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.297451] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1639.297451] env[61440]: value = "task-4281364" [ 1639.297451] env[61440]: _type = "Task" [ 1639.297451] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.305385] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': task-4281364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.808351] env[61440]: DEBUG oslo_vmware.exceptions [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1639.808633] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.809211] env[61440]: ERROR nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1639.809211] env[61440]: Faults: ['InvalidArgument'] [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Traceback (most recent call last): [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] yield resources [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self.driver.spawn(context, instance, image_meta, [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in 
spawn [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self._fetch_image_if_missing(context, vi) [ 1639.809211] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] image_cache(vi, tmp_image_ds_loc) [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] vm_util.copy_virtual_disk( [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] session._wait_for_task(vmdk_copy_task) [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] return self.wait_for_task(task_ref) [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] return evt.wait() [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] result = hub.switch() [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1639.809610] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] return self.greenlet.switch() [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self.f(*self.args, **self.kw) [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] raise exceptions.translate_fault(task_info.error) [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Faults: ['InvalidArgument'] [ 1639.809988] env[61440]: ERROR nova.compute.manager [instance: 
726c5ed6-d706-4886-a2c1-fc666a527662] [ 1639.809988] env[61440]: INFO nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Terminating instance [ 1639.811165] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.811373] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1639.811606] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cf590ad-5bb6-412f-bc69-5569dae6618e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.813992] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1639.814210] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1639.814921] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5f70db-acbf-417c-bdb6-9d55fdbea631 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.821744] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1639.821996] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-effb18f2-143f-4fc3-a6e9-50b0c44dc0dd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.824122] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1639.824301] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 
tempest-ListServersNegativeTestJSON-1210561073-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1639.825300] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-956a493f-3aef-47e4-8840-9949e638eb00 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.830277] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Waiting for the task: (returnval){ [ 1639.830277] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f81a1e-a762-bdc4-ec22-b2ea3a76add6" [ 1639.830277] env[61440]: _type = "Task" [ 1639.830277] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.844246] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1639.844493] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Creating directory with path [datastore2] vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1639.844684] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83329070-81ab-4fdd-a086-fe1b2b3f28a4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.865044] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Created directory with path [datastore2] vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1639.865044] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Fetch image to [datastore2] vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1639.865044] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1639.865757] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03700043-3b02-4ac1-a6bd-1b0845e3f5aa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.873058] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0955702e-d194-48d6-8828-a63570fcf812 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.882281] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14848e4-f855-4515-8a64-8f191f942c3b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.914034] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6059ac04-3b7b-4be7-9e25-ba6204c4167e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.916651] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1639.916897] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1639.917100] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Deleting the datastore file [datastore2] 726c5ed6-d706-4886-a2c1-fc666a527662 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1639.917342] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1629879e-2d37-4a4c-8b17-24a2fc36c355 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.922756] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-98bba071-bb86-4eec-a5f2-7948c8498b7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.926832] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1639.926832] env[61440]: value = "task-4281366" [ 1639.926832] env[61440]: _type = "Task" [ 1639.926832] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.934305] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': task-4281366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.945413] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1640.149054] env[61440]: DEBUG oslo_vmware.rw_handles [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1640.206945] env[61440]: DEBUG oslo_vmware.rw_handles [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1640.207731] env[61440]: DEBUG oslo_vmware.rw_handles [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1640.436358] env[61440]: DEBUG oslo_vmware.api [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': task-4281366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07511} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.436718] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1640.436759] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1640.436939] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1640.437134] env[61440]: INFO nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1640.439550] env[61440]: DEBUG nova.compute.claims [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1640.439754] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.439937] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.670240] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c63d767-aa87-4f41-8738-386a9e3427a4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.678035] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f9fa6a-de05-4dd5-bbad-a2bf07d3a4d9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.707452] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306a6439-9a34-4c97-8e2a-f00dc27da01e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.714395] env[61440]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2598d0a2-4deb-475a-80a1-17efc5310574 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.727336] env[61440]: DEBUG nova.compute.provider_tree [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.736568] env[61440]: DEBUG nova.scheduler.client.report [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1640.755218] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.315s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.755819] env[61440]: ERROR nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1640.755819] env[61440]: Faults: ['InvalidArgument'] [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Traceback (most recent call last): [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self.driver.spawn(context, instance, image_meta, [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self._fetch_image_if_missing(context, vi) [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] image_cache(vi, tmp_image_ds_loc) [ 1640.755819] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] vm_util.copy_virtual_disk( [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] session._wait_for_task(vmdk_copy_task) [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] return self.wait_for_task(task_ref) [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] return evt.wait() [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] result = hub.switch() [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] return self.greenlet.switch() [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1640.756202] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] self.f(*self.args, **self.kw) [ 1640.756552] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1640.756552] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] raise exceptions.translate_fault(task_info.error) [ 1640.756552] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1640.756552] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Faults: ['InvalidArgument'] [ 1640.756552] env[61440]: ERROR nova.compute.manager [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] [ 1640.756701] env[61440]: DEBUG nova.compute.utils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 
726c5ed6-d706-4886-a2c1-fc666a527662] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1640.760749] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Build of instance 726c5ed6-d706-4886-a2c1-fc666a527662 was re-scheduled: A specified parameter was not correct: fileType [ 1640.760749] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1640.761157] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1640.761337] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1640.761510] env[61440]: DEBUG nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1640.761674] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1641.183994] env[61440]: DEBUG nova.network.neutron [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.195480] env[61440]: INFO nova.compute.manager [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Took 0.43 seconds to deallocate network for instance. 
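Both VimFaultException tracebacks above surface through the same mechanism: wait_for_task parks the calling greenthread on an event while a looping call runs _poll_task against vCenter, and the moment the task reports an error state the fault ("A specified parameter was not correct: fileType") is translated into an exception and re-raised in the waiter, which is why the same failure is logged once from _build_resources and once from _build_and_run_instance. A minimal sketch of that poll-until-terminal shape, with a stubbed get_task_info standing in for the real PropertyCollector round trip (the state names and the TaskFault class are illustrative, not oslo.vmware internals):

import time

class TaskFault(Exception):
    """Raised when the remote task ends in an error state."""

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    # get_task_info() is assumed to return an object with .state
    # ('running' | 'success' | 'error'), .progress and .error_msg;
    # the real driver fetches this via a PropertyCollector query.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # Mirrors _poll_task raising translate_fault(task_info.error),
            # which produced the InvalidArgument fault logged above.
            raise TaskFault(info.error_msg)
        print("progress is %s%%" % info.progress)  # cf. the "progress is 0%." records
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")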
[ 1641.298270] env[61440]: INFO nova.scheduler.client.report [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Deleted allocations for instance 726c5ed6-d706-4886-a2c1-fc666a527662 [ 1641.322882] env[61440]: DEBUG oslo_concurrency.lockutils [None req-eff8fe5b-20f0-4142-a041-29865b13fed7 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "726c5ed6-d706-4886-a2c1-fc666a527662" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 691.087s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.324111] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "726c5ed6-d706-4886-a2c1-fc666a527662" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 495.694s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.324326] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] During sync_power_state the instance has a pending task (spawning). Skip. [ 1641.324505] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "726c5ed6-d706-4886-a2c1-fc666a527662" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.324989] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "726c5ed6-d706-4886-a2c1-fc666a527662" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 494.527s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.325670] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "726c5ed6-d706-4886-a2c1-fc666a527662-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.325906] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "726c5ed6-d706-4886-a2c1-fc666a527662-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.326218] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "726c5ed6-d706-4886-a2c1-fc666a527662-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.328186] env[61440]: INFO nova.compute.manager [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Terminating instance [ 1641.329931] env[61440]: DEBUG nova.compute.manager [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1641.330142] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1641.330832] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ec42f4c-3d0e-4eaa-9164-63f48306db11 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.335749] env[61440]: DEBUG nova.compute.manager [None req-fc527aa2-5927-4f97-82dd-f4598b1d6eb4 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 41d7967c-65be-4198-936e-1137afa763dd] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1641.342803] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3727bec4-bb16-46c0-a697-433860cd56c8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.361872] env[61440]: DEBUG nova.compute.manager [None req-fc527aa2-5927-4f97-82dd-f4598b1d6eb4 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 41d7967c-65be-4198-936e-1137afa763dd] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1641.373227] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 726c5ed6-d706-4886-a2c1-fc666a527662 could not be found. [ 1641.373428] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1641.373606] env[61440]: INFO nova.compute.manager [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1641.373855] env[61440]: DEBUG oslo.service.loopingcall [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.374358] env[61440]: DEBUG nova.compute.manager [-] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1641.374463] env[61440]: DEBUG nova.network.neutron [-] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1641.392703] env[61440]: DEBUG oslo_concurrency.lockutils [None req-fc527aa2-5927-4f97-82dd-f4598b1d6eb4 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "41d7967c-65be-4198-936e-1137afa763dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.951s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.402185] env[61440]: DEBUG nova.network.neutron [-] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.404749] env[61440]: DEBUG nova.compute.manager [None req-1a13bfdb-c194-4e43-b9b0-c1bf8bf73e69 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: 9d6c9151-6d22-41fe-8f69-fd17758a20b6] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1641.411043] env[61440]: INFO nova.compute.manager [-] [instance: 726c5ed6-d706-4886-a2c1-fc666a527662] Took 0.04 seconds to deallocate network for instance. [ 1641.429108] env[61440]: DEBUG nova.compute.manager [None req-1a13bfdb-c194-4e43-b9b0-c1bf8bf73e69 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: 9d6c9151-6d22-41fe-8f69-fd17758a20b6] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1641.453533] env[61440]: DEBUG oslo_concurrency.lockutils [None req-1a13bfdb-c194-4e43-b9b0-c1bf8bf73e69 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "9d6c9151-6d22-41fe-8f69-fd17758a20b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.963s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.465251] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Starting instance...
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1641.503683] env[61440]: DEBUG oslo_concurrency.lockutils [None req-9eaaaaf4-5b7e-4e04-b430-06863cb30aef tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "726c5ed6-d706-4886-a2c1-fc666a527662" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.179s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.521677] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.522017] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.523738] env[61440]: INFO nova.compute.claims [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1641.813063] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312e3b88-c8ea-406b-ac57-0c7c4aac47ac {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.821194] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780c2239-c6d7-4534-8896-26b593a896bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.853191] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2f42b7-0f3c-4553-b6a2-39f2f2d2de0b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.860538] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7c75f7-c329-4518-a52f-b32f8fc461d0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.873878] env[61440]: DEBUG nova.compute.provider_tree [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.886916] env[61440]: DEBUG nova.scheduler.client.report [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Inventory has not changed for provider
9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1641.902531] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.380s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.903234] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1641.958360] env[61440]: DEBUG nova.compute.utils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1641.960275] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1641.961160] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1641.970822] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1642.039200] env[61440]: DEBUG nova.policy [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a1d9c07aada4f77a5d060915a1a0a70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f58f5a587de417e9fe4b38027568d53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1642.053466] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1642.090678] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1642.090939] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1642.091113] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1642.091300] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1642.091445] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Image pref 0:0:0 
{{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1642.091590] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1642.091800] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1642.091959] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1642.092149] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1642.092311] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1642.092480] env[61440]: DEBUG nova.virt.hardware [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1642.093545] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bee2341-3d3b-4ef5-8a0d-ae0815383a40 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.102310] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8059cb-93c7-48bc-981a-f3c85b056eeb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.470577] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Successfully created port: 901ed615-6ac3-455b-a5a3-5e1ce6f6516d {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1643.274470] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1643.433065] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Successfully updated port: 901ed615-6ac3-455b-a5a3-5e1ce6f6516d {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1643.451842] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "refresh_cache-9f2d4b43-f7ef-401b-a63d-844e113b7142" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.452016] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquired lock "refresh_cache-9f2d4b43-f7ef-401b-a63d-844e113b7142" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.452390] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1643.464033] env[61440]: DEBUG nova.compute.manager [req-4a55a6c5-9e0e-4ce8-a0a2-a7446986cefa req-faf0e24f-5c84-4abf-89f0-0dbb311e9f16 service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Received event network-vif-plugged-901ed615-6ac3-455b-a5a3-5e1ce6f6516d {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1643.464348] env[61440]: DEBUG oslo_concurrency.lockutils [req-4a55a6c5-9e0e-4ce8-a0a2-a7446986cefa req-faf0e24f-5c84-4abf-89f0-0dbb311e9f16 service nova] Acquiring lock "9f2d4b43-f7ef-401b-a63d-844e113b7142-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.464635] env[61440]: DEBUG oslo_concurrency.lockutils [req-4a55a6c5-9e0e-4ce8-a0a2-a7446986cefa req-faf0e24f-5c84-4abf-89f0-0dbb311e9f16 service nova] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.464887] env[61440]: DEBUG oslo_concurrency.lockutils [req-4a55a6c5-9e0e-4ce8-a0a2-a7446986cefa req-faf0e24f-5c84-4abf-89f0-0dbb311e9f16 service nova] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.465169] env[61440]: DEBUG nova.compute.manager [req-4a55a6c5-9e0e-4ce8-a0a2-a7446986cefa req-faf0e24f-5c84-4abf-89f0-0dbb311e9f16 service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] No waiting events found dispatching network-vif-plugged-901ed615-6ac3-455b-a5a3-5e1ce6f6516d
{{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1643.465453] env[61440]: WARNING nova.compute.manager [req-4a55a6c5-9e0e-4ce8-a0a2-a7446986cefa req-faf0e24f-5c84-4abf-89f0-0dbb311e9f16 service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Received unexpected event network-vif-plugged-901ed615-6ac3-455b-a5a3-5e1ce6f6516d for instance with vm_state building and task_state spawning. [ 1643.523487] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1643.714449] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Updating instance_info_cache with network_info: [{"id": "901ed615-6ac3-455b-a5a3-5e1ce6f6516d", "address": "fa:16:3e:11:e8:b0", "network": {"id": "bda870f4-d9e5-49b4-92d0-6cd0389007a2", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-168584866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f58f5a587de417e9fe4b38027568d53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap901ed615-6a", "ovs_interfaceid": "901ed615-6ac3-455b-a5a3-5e1ce6f6516d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.728682] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Releasing lock "refresh_cache-9f2d4b43-f7ef-401b-a63d-844e113b7142" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.728980] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance network_info: |[{"id": "901ed615-6ac3-455b-a5a3-5e1ce6f6516d", "address": "fa:16:3e:11:e8:b0", "network": {"id": "bda870f4-d9e5-49b4-92d0-6cd0389007a2", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-168584866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f58f5a587de417e9fe4b38027568d53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap901ed615-6a", "ovs_interfaceid": "901ed615-6ac3-455b-a5a3-5e1ce6f6516d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1643.729409] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:e8:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '901ed615-6ac3-455b-a5a3-5e1ce6f6516d', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1643.738125] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Creating folder: Project (6f58f5a587de417e9fe4b38027568d53). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1643.738654] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8ff5ab3-606d-4eff-a293-d6e420997d6b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.750052] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Created folder: Project (6f58f5a587de417e9fe4b38027568d53) in parent group-v843372. [ 1643.750331] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Creating folder: Instances. Parent ref: group-v843464. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1643.750627] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5048a1fe-a2a8-479c-a2f5-72d40d876cbb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.760245] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Created folder: Instances in parent group-v843464. 
[ 1643.760470] env[61440]: DEBUG oslo.service.loopingcall [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1643.760646] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1643.760843] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c08f780b-4e54-424c-8613-e7e1a62cb94d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.778167] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1643.778167] env[61440]: value = "task-4281369" [ 1643.778167] env[61440]: _type = "Task" [ 1643.778167] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.786196] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281369, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.289100] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281369, 'name': CreateVM_Task, 'duration_secs': 0.327826} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.289100] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1644.289691] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.289861] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.290216] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1644.290471] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cd894ca-8e97-4863-bc55-1dbdb05b1dea {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.295658] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee 
tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Waiting for the task: (returnval){ [ 1644.295658] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]521e5402-60f2-68ad-8f2b-d55353c175f1" [ 1644.295658] env[61440]: _type = "Task" [ 1644.295658] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.303145] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]521e5402-60f2-68ad-8f2b-d55353c175f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.805435] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.805754] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1644.805855] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.274598] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.274768] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1645.496835] env[61440]: DEBUG nova.compute.manager [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Received event network-changed-901ed615-6ac3-455b-a5a3-5e1ce6f6516d {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1645.497051] env[61440]: DEBUG nova.compute.manager [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Refreshing instance network info cache due to event network-changed-901ed615-6ac3-455b-a5a3-5e1ce6f6516d. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1645.497260] env[61440]: DEBUG oslo_concurrency.lockutils [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] Acquiring lock "refresh_cache-9f2d4b43-f7ef-401b-a63d-844e113b7142" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.497445] env[61440]: DEBUG oslo_concurrency.lockutils [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] Acquired lock "refresh_cache-9f2d4b43-f7ef-401b-a63d-844e113b7142" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.497582] env[61440]: DEBUG nova.network.neutron [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Refreshing network info cache for port 901ed615-6ac3-455b-a5a3-5e1ce6f6516d {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1645.796365] env[61440]: DEBUG nova.network.neutron [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Updated VIF entry in instance network info cache for port 901ed615-6ac3-455b-a5a3-5e1ce6f6516d. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1645.796627] env[61440]: DEBUG nova.network.neutron [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Updating instance_info_cache with network_info: [{"id": "901ed615-6ac3-455b-a5a3-5e1ce6f6516d", "address": "fa:16:3e:11:e8:b0", "network": {"id": "bda870f4-d9e5-49b4-92d0-6cd0389007a2", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-168584866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f58f5a587de417e9fe4b38027568d53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap901ed615-6a", "ovs_interfaceid": "901ed615-6ac3-455b-a5a3-5e1ce6f6516d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.806737] env[61440]: DEBUG oslo_concurrency.lockutils [req-414ca0b5-10c8-438e-b0f6-2c624d747d0c req-0591dadc-da7a-421c-9fd9-2f1f99714c3c service nova] Releasing lock "refresh_cache-9f2d4b43-f7ef-401b-a63d-844e113b7142" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.023481] env[61440]: DEBUG oslo_concurrency.lockutils [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 
tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.274610] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.274820] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1647.274920] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1647.298536] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.298696] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.298858] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.298980] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299110] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299235] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299356] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299517] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299670] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299798] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1647.299915] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1647.300471] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.274562] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.274656] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.274962] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.288617] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.288792] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.288965] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.289141] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1651.290598] env[61440]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8073f188-6605-498f-afca-2ff79f244e82 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.299017] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749cc65e-4156-47a2-b360-73417b466dd8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.312516] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaac887d-7211-43b3-8e0a-f07aaa251104 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.318646] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970c9efb-67e4-4b7a-9f3d-b9669676c579 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.347015] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180675MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1651.347212] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.347383] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.438639] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance fd9b654a-0651-46ae-a7c9-30743b875e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.438925] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.438925] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439106] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439236] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439358] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439476] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439591] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439705] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.439818] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1651.450885] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.462117] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ee5f4d65-3264-451a-9e9e-8a7e47b1b527 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.474966] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.488740] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4ab24b15-4808-4a3c-81d3-a1282e633cf8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.500735] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.515029] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a7e53bc4-8461-411a-9b45-66678b9bb31f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.515251] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1651.515899] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1651.735063] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ba9eaf-bada-469e-b3a9-57ebaa19cdbb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.742538] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0869afac-3dfc-4b5c-ace6-946d51d56aaf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.771420] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7b8751-b10f-4b1c-b0c0-d9efafb99e23 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.778957] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda237e8-bce8-457d-9a89-3666882dcd81 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.791978] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.803879] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.818165] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1651.818354] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.471s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.817906] env[61440]: DEBUG oslo_service.periodic_task [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1654.269673] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.307029] env[61440]: WARNING oslo_vmware.rw_handles [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1689.307029] env[61440]: ERROR oslo_vmware.rw_handles [ 1689.307694] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1689.309357] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1689.309609] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Copying Virtual Disk [datastore2] vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/ec053da9-b2ae-42b0-a027-ea2b2bc8ed3a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 
1689.309950] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66cdef08-e749-4c41-9b96-955244fddd7a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.317605] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Waiting for the task: (returnval){ [ 1689.317605] env[61440]: value = "task-4281370" [ 1689.317605] env[61440]: _type = "Task" [ 1689.317605] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.325196] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Task: {'id': task-4281370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.828759] env[61440]: DEBUG oslo_vmware.exceptions [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1689.829086] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.829839] env[61440]: ERROR nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1689.829839] env[61440]: Faults: ['InvalidArgument'] [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Traceback (most recent call last): [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] yield resources [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self.driver.spawn(context, instance, image_meta, [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1689.829839] env[61440]: ERROR nova.compute.manager 
[instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self._fetch_image_if_missing(context, vi) [ 1689.829839] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] image_cache(vi, tmp_image_ds_loc) [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] vm_util.copy_virtual_disk( [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] session._wait_for_task(vmdk_copy_task) [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] return self.wait_for_task(task_ref) [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] return evt.wait() [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] result = hub.switch() [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1689.830285] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] return self.greenlet.switch() [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self.f(*self.args, **self.kw) [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] raise exceptions.translate_fault(task_info.error) [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: 
fd9b654a-0651-46ae-a7c9-30743b875e2e] Faults: ['InvalidArgument'] [ 1689.830828] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] [ 1689.830828] env[61440]: INFO nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Terminating instance [ 1689.831800] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.831971] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1689.832499] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1689.832687] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1689.832959] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a4ff81c-9716-4947-89f2-7800dcebf1c8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.835299] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea680ed-769e-4c04-be4d-bc54dbbf0eae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.842248] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1689.842361] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4e1ce89-c4fc-42e7-bf3a-fb4e0ccfb848 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.845398] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1689.845630] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1689.846580] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72a16e16-e587-492d-a382-4fbca759b734 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.851171] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Waiting for the task: (returnval){ [ 1689.851171] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52dfcc15-8f5a-5188-8eb9-1a296f50fc94" [ 1689.851171] env[61440]: _type = "Task" [ 1689.851171] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.859913] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52dfcc15-8f5a-5188-8eb9-1a296f50fc94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.911728] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1689.911851] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1689.912137] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Deleting the datastore file [datastore2] fd9b654a-0651-46ae-a7c9-30743b875e2e {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1689.912468] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d452384-6186-4840-a29f-ee8ab5b2e48c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.918998] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Waiting for the task: (returnval){ [ 1689.918998] env[61440]: value = "task-4281372" [ 1689.918998] env[61440]: _type = "Task" [ 1689.918998] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.926859] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Task: {'id': task-4281372, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.361846] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1690.362225] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Creating directory with path [datastore2] vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1690.362346] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f13b88bd-77b0-4ae9-9b84-db75786012e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.374078] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Created directory with path [datastore2] vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1690.374283] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Fetch image to [datastore2] vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1690.374456] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1690.375177] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f371590c-ce06-4303-a936-52030052a709 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.381835] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2d6213-c2d2-4a91-af4a-496b1df76e4d {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.390658] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c779a6af-9ea2-4250-9a89-cfe4855ea329 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.423837] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f95bec-52be-48e4-9a59-b31a7281e82e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.430745] env[61440]: DEBUG oslo_vmware.api [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Task: {'id': task-4281372, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061883} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.432342] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1690.432426] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1690.432643] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1690.432713] env[61440]: INFO nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Took 0.60 seconds to destroy the instance on the hypervisor. 
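
The Unregister/DeleteDatastoreFile/SearchDatastore exchanges above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method, then poll the returned Task moref until it reaches SUCCESS (the "progress is 0%" lines) or raises. A minimal sketch of that pattern follows; the host, credentials and file path are placeholders, not values from this deployment:

    # Sketch of the invoke-then-poll pattern behind DeleteDatastoreFile_Task
    # above; endpoint, credentials and the file path are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
    file_manager = session.vim.service_content.fileManager
    dc_ref = vim_util.get_moref('ha-datacenter', 'Datacenter')
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore2] some-instance-dir',
                              datacenter=dc_ref)
    # wait_for_task polls Task.info until SUCCESS and returns the task info;
    # a task error is translated into an oslo_vmware exception instead.
    task_info = session.wait_for_task(task)
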
[ 1690.434842] env[61440]: DEBUG nova.compute.claims [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1690.435040] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.435261] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.437778] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d02305c1-f491-4e33-8ad1-c9b76ce6bf5d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.462064] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1690.595202] env[61440]: DEBUG oslo_vmware.rw_handles [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1690.654592] env[61440]: DEBUG oslo_vmware.rw_handles [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1690.654817] env[61440]: DEBUG oslo_vmware.rw_handles [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1690.739828] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f0d7a6-9d6a-4edf-94e5-907d257632cd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.747597] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87cdbdb-f80d-44a2-93ce-8d9fbf0a1c61 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.776382] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e499004-9ed2-4eec-9743-5720ad75538b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.783842] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f811a134-bfc1-436d-8d66-9e3eb7ceffe5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.798489] env[61440]: DEBUG nova.compute.provider_tree [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.808362] env[61440]: DEBUG nova.scheduler.client.report [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1690.822315] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.387s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.822761] env[61440]: ERROR nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1690.822761] env[61440]: Faults: ['InvalidArgument'] [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Traceback (most recent call last): [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self.driver.spawn(context, instance, image_meta, [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self._fetch_image_if_missing(context, vi) [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] image_cache(vi, tmp_image_ds_loc) [ 1690.822761] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] vm_util.copy_virtual_disk( [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] session._wait_for_task(vmdk_copy_task) [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] return self.wait_for_task(task_ref) [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] return evt.wait() [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] result = hub.switch() [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] return self.greenlet.switch() [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1690.823152] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] self.f(*self.args, **self.kw) [ 1690.823527] env[61440]: ERROR nova.compute.manager [instance: 
fd9b654a-0651-46ae-a7c9-30743b875e2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1690.823527] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] raise exceptions.translate_fault(task_info.error) [ 1690.823527] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1690.823527] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Faults: ['InvalidArgument'] [ 1690.823527] env[61440]: ERROR nova.compute.manager [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] [ 1690.823527] env[61440]: DEBUG nova.compute.utils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1690.824905] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Build of instance fd9b654a-0651-46ae-a7c9-30743b875e2e was re-scheduled: A specified parameter was not correct: fileType [ 1690.824905] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1690.825317] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1690.825489] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
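
The VimFaultException above is how a failed vCenter task surfaces through _poll_task: the task's error is run through exceptions.translate_fault and raised to the caller of wait_for_task. A hedged sketch of inspecting it, where session and vmdk_copy_task stand in for the objects used by vm_util.copy_virtual_disk and are not defined in this log:

    # Sketch only: how a caller can inspect the fault raised above.
    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as e:
        # e.fault_list holds the raw VIM fault names, here ['InvalidArgument'];
        # str(e) holds the message ("A specified parameter was not correct:
        # fileType"). Re-raise after any driver-specific handling.
        if 'InvalidArgument' in e.fault_list:
            raise
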
[ 1690.825656] env[61440]: DEBUG nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1690.825820] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1691.379770] env[61440]: DEBUG nova.network.neutron [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.394969] env[61440]: INFO nova.compute.manager [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Took 0.57 seconds to deallocate network for instance. [ 1691.507718] env[61440]: INFO nova.scheduler.client.report [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Deleted allocations for instance fd9b654a-0651-46ae-a7c9-30743b875e2e [ 1691.530037] env[61440]: DEBUG oslo_concurrency.lockutils [None req-db28b368-1525-478b-a69e-849c097b1054 tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 694.175s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.531158] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 497.169s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.531393] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "fd9b654a-0651-46ae-a7c9-30743b875e2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.531609] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e-events"
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.532065] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.534945] env[61440]: INFO nova.compute.manager [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Terminating instance [ 1691.536038] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquiring lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.536096] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Acquired lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.536306] env[61440]: DEBUG nova.network.neutron [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1691.545980] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1691.567376] env[61440]: DEBUG nova.network.neutron [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
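
The Acquiring/acquired/released triplets with waited/held timings in this stretch are oslo_concurrency's lockutils instrumentation, in its two forms: the synchronized decorator (lockutils.py:402/407/421) and the lock() context manager used for the "refresh_cache-<uuid>" locks (lockutils.py:310/313/331). A minimal sketch with illustrative lock names:

    # Both lockutils forms seen in the surrounding lines; names illustrative.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def claim_resources():
        pass  # emits the Acquiring/acquired ... waited/held DEBUG lines

    def refresh_cache(instance_uuid):
        # Emits Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>".
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass
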
[ 1691.596861] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.597116] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.598647] env[61440]: INFO nova.compute.claims [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1691.725381] env[61440]: DEBUG nova.network.neutron [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.737279] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Releasing lock "refresh_cache-fd9b654a-0651-46ae-a7c9-30743b875e2e" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.737902] env[61440]: DEBUG nova.compute.manager [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
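
The destroy path right below tolerates a VM that is already gone from the backend: the lookup raises InstanceNotFound, which is downgraded to a WARNING and treated as a successful destroy so that terminate stays idempotent. A sketch of that shape (the two helpers are hypothetical, not nova's exact internals):

    # Sketch of the tolerant destroy visible below; helpers are hypothetical.
    from nova import exception

    def destroy_on_backend(session, instance):
        try:
            vm_ref = find_vm_by_uuid(session, instance.uuid)    # hypothetical
            unregister_and_delete(session, vm_ref)              # hypothetical
        except exception.InstanceNotFound:
            # Nothing on the backend: log and continue, matching the
            # WARNING + "Instance destroyed" pair in the log.
            pass
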
[ 1691.738257] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1691.739056] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7c2e52d-d97d-4f9c-823f-ddc8981c94d0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.754464] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef6c9ee-ca76-4d0d-8c70-a5442953aaf8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.795711] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fd9b654a-0651-46ae-a7c9-30743b875e2e could not be found. [ 1691.795927] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1691.796130] env[61440]: INFO nova.compute.manager [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1691.796387] env[61440]: DEBUG oslo.service.loopingcall [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1691.796922] env[61440]: DEBUG nova.compute.manager [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1691.797045] env[61440]: DEBUG nova.network.neutron [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1691.822318] env[61440]: DEBUG nova.network.neutron [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
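
The "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo_service's looping-call machinery: the deallocation is wrapped in a looping call that runs until it signals completion. A minimal sketch using the fixed-interval variant (the deallocate call is a stand-in for nova's):

    # Sketch of an oslo_service looping call like the one awaited above.
    from oslo_service import loopingcall

    def _deallocate():
        deallocate_network(context, instance)   # stand-in for nova's call
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
    timer.start(interval=1).wait()   # blocks until LoopingCallDone is raised
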
[ 1691.831690] env[61440]: DEBUG nova.network.neutron [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.840445] env[61440]: INFO nova.compute.manager [-] [instance: fd9b654a-0651-46ae-a7c9-30743b875e2e] Took 0.04 seconds to deallocate network for instance. [ 1691.899812] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474409a2-8f2b-496a-ac6d-8807061026e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.912456] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf02025-a8db-452e-9ab2-4b96eddb9f85 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.941475] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd53d18-79ba-410f-97f4-c3501c434126 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.948980] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1491ee3f-0ee1-4ea6-82e8-c4ccc62a3346 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.956270] env[61440]: DEBUG oslo_concurrency.lockutils [None req-27548ba5-4793-4d46-b0ff-b7b4f176577e tempest-ListServersNegativeTestJSON-1210561073 tempest-ListServersNegativeTestJSON-1210561073-project-member] Lock "fd9b654a-0651-46ae-a7c9-30743b875e2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.425s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.965465] env[61440]: DEBUG nova.compute.provider_tree [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1691.974496] env[61440]: DEBUG nova.scheduler.client.report [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1691.986801] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.390s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
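
The inventory dict reported above is what placement schedules against: for each resource class the schedulable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Worked out for the values in the log:

    # Capacity implied by the logged inventory: (total - reserved) * ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 180},
    }
    for rc, inv in inventory.items():
        cap = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, cap, 'max per allocation:', inv['max_unit'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0
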
[ 1692.004332] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "b6873503-f6b4-4b6b-bb44-9471404662db" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.004562] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "b6873503-f6b4-4b6b-bb44-9471404662db" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.009459] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "b6873503-f6b4-4b6b-bb44-9471404662db" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.005s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.009918] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1692.042220] env[61440]: DEBUG nova.compute.utils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1692.043449] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1692.043647] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1692.052541] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Start building block device mappings for instance.
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1692.108449] env[61440]: DEBUG nova.policy [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13942f6086034a28a46d57da6f5ca593', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2155c29279d549989eb21b099eb7b8b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1692.114271] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1692.142335] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1692.142579] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1692.142760] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1692.142969] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1692.143145] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1692.143300] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be 
tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1692.143516] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1692.143745] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1692.143924] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1692.144105] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1692.144286] env[61440]: DEBUG nova.virt.hardware [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1692.145196] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02378f9-0643-46cc-9b34-28545c83c117 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.153374] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9102b891-e743-44e7-9da7-d026f0a5d232 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.448060] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Successfully created port: 9e6bacb6-6099-4411-b6c0-3cc8ebd84676 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1693.186327] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Successfully updated port: 9e6bacb6-6099-4411-b6c0-3cc8ebd84676 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1693.198407] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "refresh_cache-e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.198564] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquired lock "refresh_cache-e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.198714] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1693.240952] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1693.420352] env[61440]: DEBUG nova.compute.manager [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Received event network-vif-plugged-9e6bacb6-6099-4411-b6c0-3cc8ebd84676 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1693.420570] env[61440]: DEBUG oslo_concurrency.lockutils [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] Acquiring lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.420773] env[61440]: DEBUG oslo_concurrency.lockutils [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.420936] env[61440]: DEBUG oslo_concurrency.lockutils [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.421118] env[61440]: DEBUG nova.compute.manager [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] No waiting events found dispatching network-vif-plugged-9e6bacb6-6099-4411-b6c0-3cc8ebd84676 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1693.421284] env[61440]: WARNING nova.compute.manager [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Received unexpected event network-vif-plugged-9e6bacb6-6099-4411-b6c0-3cc8ebd84676 for instance with vm_state building and task_state
spawning. [ 1693.421444] env[61440]: DEBUG nova.compute.manager [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Received event network-changed-9e6bacb6-6099-4411-b6c0-3cc8ebd84676 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1693.421600] env[61440]: DEBUG nova.compute.manager [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Refreshing instance network info cache due to event network-changed-9e6bacb6-6099-4411-b6c0-3cc8ebd84676. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1693.421761] env[61440]: DEBUG oslo_concurrency.lockutils [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] Acquiring lock "refresh_cache-e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.445175] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Updating instance_info_cache with network_info: [{"id": "9e6bacb6-6099-4411-b6c0-3cc8ebd84676", "address": "fa:16:3e:e7:38:87", "network": {"id": "7c59cae7-e012-464b-86f6-f72771c6e965", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1246344957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2155c29279d549989eb21b099eb7b8b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e6bacb6-60", "ovs_interfaceid": "9e6bacb6-6099-4411-b6c0-3cc8ebd84676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.459174] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Releasing lock "refresh_cache-e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.459498] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance network_info: |[{"id": "9e6bacb6-6099-4411-b6c0-3cc8ebd84676", "address": "fa:16:3e:e7:38:87", "network": {"id": "7c59cae7-e012-464b-86f6-f72771c6e965", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1246344957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2155c29279d549989eb21b099eb7b8b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e6bacb6-60", "ovs_interfaceid": "9e6bacb6-6099-4411-b6c0-3cc8ebd84676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1693.459737] env[61440]: DEBUG oslo_concurrency.lockutils [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] Acquired lock "refresh_cache-e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.459915] env[61440]: DEBUG nova.network.neutron [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Refreshing network info cache for port 9e6bacb6-6099-4411-b6c0-3cc8ebd84676 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1693.460955] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:38:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e6bacb6-6099-4411-b6c0-3cc8ebd84676', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1693.468433] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Creating folder: Project (2155c29279d549989eb21b099eb7b8b5). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1693.471208] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18dda85b-5a38-4f60-9d8e-624b868eeff1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.481730] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Created folder: Project (2155c29279d549989eb21b099eb7b8b5) in parent group-v843372. [ 1693.481905] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Creating folder: Instances. Parent ref: group-v843467. 
{{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1693.482134] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40b2ec64-ce5a-4c39-916a-a302506ad1ac {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.491741] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Created folder: Instances in parent group-v843467. [ 1693.491963] env[61440]: DEBUG oslo.service.loopingcall [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.492158] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1693.492355] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c4f0b8b-9346-4225-9c6a-d097b6a7fefa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.512956] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1693.512956] env[61440]: value = "task-4281375" [ 1693.512956] env[61440]: _type = "Task" [ 1693.512956] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.520264] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281375, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.758566] env[61440]: DEBUG nova.network.neutron [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Updated VIF entry in instance network info cache for port 9e6bacb6-6099-4411-b6c0-3cc8ebd84676. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1693.758931] env[61440]: DEBUG nova.network.neutron [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Updating instance_info_cache with network_info: [{"id": "9e6bacb6-6099-4411-b6c0-3cc8ebd84676", "address": "fa:16:3e:e7:38:87", "network": {"id": "7c59cae7-e012-464b-86f6-f72771c6e965", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1246344957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2155c29279d549989eb21b099eb7b8b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e6bacb6-60", "ovs_interfaceid": "9e6bacb6-6099-4411-b6c0-3cc8ebd84676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.768832] env[61440]: DEBUG oslo_concurrency.lockutils [req-90ae82e5-11ed-47a9-9864-7af971c45b7b req-1fbc46b0-08d3-48f1-ae25-1291729be61e service nova] Releasing lock "refresh_cache-e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.022923] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281375, 'name': CreateVM_Task, 'duration_secs': 0.338989} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.023103] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1694.023837] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.024013] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.024377] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1694.024635] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f30d83-1e02-4319-ab4e-f608d3243880 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.028947] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Waiting for the task: (returnval){ [ 1694.028947] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520cb201-bd02-b023-d597-22fbc5849e1c" [ 1694.028947] env[61440]: _type = "Task" [ 1694.028947] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.036153] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520cb201-bd02-b023-d597-22fbc5849e1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.539621] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.540100] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1694.540365] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.245325] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.274032] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1705.274412] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1705.274412] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances with incomplete migration {{(pid=61440) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1706.283759] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1706.284087] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1707.274888] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.275136] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1707.275283] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1707.300065] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300325] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300370] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300477] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300597] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300759] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300834] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.300958] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.301087] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.301204] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1707.301319] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1709.274748] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.275439] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.282786] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.283068] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.283266] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.294405] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.294617] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.294781] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.294971] env[61440]: DEBUG 
nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1712.296471] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb2e964-86cb-4406-af00-ab79c31ac239 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.305052] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135203c6-061f-4908-b8e7-ec1b63fba236 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.318795] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaa967e-e2e3-4048-8f9a-84675ab233d6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.325033] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e564eaa-3e1a-4fba-b7b0-5f8246f49077 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.354263] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180574MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1712.354401] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.354585] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.530646] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.530820] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.530952] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531099] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531222] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531344] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531461] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531576] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531693] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.531817] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.543751] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1712.554488] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4ab24b15-4808-4a3c-81d3-a1282e633cf8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1712.564845] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1712.574058] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a7e53bc4-8461-411a-9b45-66678b9bb31f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1712.574318] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1712.574467] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1712.589728] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing inventories for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1712.604235] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating ProviderTree inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1712.604746] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1712.615178] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing aggregate associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, aggregates: None {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1712.632327] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing trait associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1712.781469] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55e9156-a1c1-4fce-a4e4-afd918989756 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.789251] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-04299958-ea96-414f-8e90-7ba0b0074345 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.819060] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f224550-bb44-4ca5-9608-c0b43c9fe6ef {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.826105] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c85f0b-aea2-4b59-9190-2d6bbff2fa5e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.839087] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.847772] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1712.864378] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1712.864566] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.510s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.856311] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.270216] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.269636] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.274735] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.275156] env[61440]: 
DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1726.284432] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] There are 0 instances to clean {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1738.902222] env[61440]: WARNING oslo_vmware.rw_handles [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1738.902222] env[61440]: ERROR oslo_vmware.rw_handles [ 1738.903106] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1738.904609] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1738.904862] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Copying Virtual Disk [datastore2] vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/e5814f94-7db1-4acb-98bd-b010ed95bf78/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1738.905160] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa4bf48e-ec05-4043-bb20-ebb54030cf74 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.912975] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Waiting for the task: (returnval){ [ 1738.912975] env[61440]: value = "task-4281376" [ 1738.912975] env[61440]: _type = "Task" [ 1738.912975] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.921294] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Task: {'id': task-4281376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.423975] env[61440]: DEBUG oslo_vmware.exceptions [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1739.424451] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.425044] env[61440]: ERROR nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1739.425044] env[61440]: Faults: ['InvalidArgument'] [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Traceback (most recent call last): [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] yield resources [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self.driver.spawn(context, instance, image_meta, [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: 
e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self._fetch_image_if_missing(context, vi) [ 1739.425044] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] image_cache(vi, tmp_image_ds_loc) [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] vm_util.copy_virtual_disk( [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] session._wait_for_task(vmdk_copy_task) [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] return self.wait_for_task(task_ref) [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] return evt.wait() [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] result = hub.switch() [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1739.425421] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] return self.greenlet.switch() [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self.f(*self.args, **self.kw) [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] raise exceptions.translate_fault(task_info.error) [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Faults: 
['InvalidArgument'] [ 1739.425857] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] [ 1739.425857] env[61440]: INFO nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Terminating instance [ 1739.426959] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.427181] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.427796] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1739.427983] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1739.428224] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d9455e3-05f5-4200-9cf4-cdf08117597a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.430585] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0075bb-8cc2-4724-a25c-c6c4741ce46e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.437765] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1739.438027] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-850bf7ab-0dbd-4fdb-8d42-c3e5b4bd306d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.442325] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.442325] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1739.442325] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeace0fe-efb4-443c-ad3d-04c1068ec82b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.446467] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Waiting for the task: (returnval){ [ 1739.446467] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]522c8dc4-cf5a-66f9-e006-c37a72bb6c9f" [ 1739.446467] env[61440]: _type = "Task" [ 1739.446467] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.456088] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]522c8dc4-cf5a-66f9-e006-c37a72bb6c9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.505155] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1739.505405] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1739.505556] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Deleting the datastore file [datastore2] e607fbab-cf85-46c0-81a8-5397fc3b9d2d {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1739.505818] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06812015-d588-4e16-88a3-7a330b130569 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.511626] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Waiting for the task: (returnval){ [ 1739.511626] env[61440]: value = "task-4281378" [ 1739.511626] env[61440]: _type = "Task" [ 1739.511626] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.518987] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Task: {'id': task-4281378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.957273] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1739.957273] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Creating directory with path [datastore2] vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.957758] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a7b1b40-c5e0-4e45-afa2-a095374962b7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.969346] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Created directory with path [datastore2] vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.969565] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Fetch image to [datastore2] vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1739.969743] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1739.970542] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf712ee-3213-400b-bf19-c24a1ad65302 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.977313] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc4b8e1-f50c-4a0d-ba86-d19ddbb60995 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.987158] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d36204-dae6-477e-a289-a5b931282b29 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.021262] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa19e00b-4c2e-41bb-99c3-adb35a3becae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.028692] env[61440]: DEBUG oslo_vmware.api [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Task: {'id': task-4281378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089418} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.030278] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1740.030474] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1740.030689] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1740.030876] env[61440]: INFO nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1740.032720] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eb587146-3021-485d-a562-87d4c2e929ff {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.034883] env[61440]: DEBUG nova.compute.claims [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1740.035304] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.035304] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.059231] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1740.262849] env[61440]: DEBUG oslo_vmware.rw_handles [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1740.326308] env[61440]: DEBUG oslo_vmware.rw_handles [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1740.326506] env[61440]: DEBUG oslo_vmware.rw_handles [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1740.362114] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68d6344-8524-4b29-b103-f9c3e98c3fb7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.370829] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6427a0eb-6c0f-4394-bc1f-a2ffcc1c40d0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.404454] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e776a1f-8859-4b88-b0d2-024744c756d5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.411601] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9678be-9e14-4742-bf0a-06ceedcc88fd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.424826] env[61440]: DEBUG nova.compute.provider_tree [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.435075] env[61440]: DEBUG nova.scheduler.client.report [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1740.450484] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.415s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.451045] env[61440]: ERROR nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1740.451045] env[61440]: Faults: ['InvalidArgument'] [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Traceback (most recent call last): [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/compute/manager.py", line 
2633, in _build_and_run_instance [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self.driver.spawn(context, instance, image_meta, [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self._fetch_image_if_missing(context, vi) [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] image_cache(vi, tmp_image_ds_loc) [ 1740.451045] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] vm_util.copy_virtual_disk( [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] session._wait_for_task(vmdk_copy_task) [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] return self.wait_for_task(task_ref) [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] return evt.wait() [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] result = hub.switch() [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] return self.greenlet.switch() [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1740.451395] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] self.f(*self.args, **self.kw) [ 1740.451742] env[61440]: ERROR nova.compute.manager [instance: 
e607fbab-cf85-46c0-81a8-5397fc3b9d2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1740.451742] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] raise exceptions.translate_fault(task_info.error) [ 1740.451742] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1740.451742] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Faults: ['InvalidArgument'] [ 1740.451742] env[61440]: ERROR nova.compute.manager [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] [ 1740.451878] env[61440]: DEBUG nova.compute.utils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1740.453373] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Build of instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d was re-scheduled: A specified parameter was not correct: fileType [ 1740.453373] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1740.453829] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1740.454056] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1740.454276] env[61440]: DEBUG nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1740.454571] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1740.806822] env[61440]: DEBUG nova.network.neutron [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.817260] env[61440]: INFO nova.compute.manager [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Took 0.36 seconds to deallocate network for instance. [ 1740.920025] env[61440]: INFO nova.scheduler.client.report [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Deleted allocations for instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d [ 1740.954987] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2e7eef78-297b-48b1-b75c-1a5f4f086364 tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 687.763s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.956378] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 492.148s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.956607] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Acquiring lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.958179] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] 
Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.960818] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.963375] env[61440]: INFO nova.compute.manager [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Terminating instance [ 1740.973138] env[61440]: DEBUG nova.compute.manager [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1740.973467] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1740.974259] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2006cd1e-f184-4835-8be8-66de78b2d820 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.980051] env[61440]: DEBUG nova.compute.manager [None req-3a2e324a-cd30-4ecf-8eea-cfaab3aa43c6 tempest-ServersNegativeTestMultiTenantJSON-63877719 tempest-ServersNegativeTestMultiTenantJSON-63877719-project-member] [instance: ee5f4d65-3264-451a-9e9e-8a7e47b1b527] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1740.998213] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf601bb7-02d6-48df-8994-0bb65012c463 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.014975] env[61440]: DEBUG nova.compute.manager [None req-3a2e324a-cd30-4ecf-8eea-cfaab3aa43c6 tempest-ServersNegativeTestMultiTenantJSON-63877719 tempest-ServersNegativeTestMultiTenantJSON-63877719-project-member] [instance: ee5f4d65-3264-451a-9e9e-8a7e47b1b527] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.030070] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e607fbab-cf85-46c0-81a8-5397fc3b9d2d could not be found. 
[ 1741.030311] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1741.030838] env[61440]: INFO nova.compute.manager [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1741.030838] env[61440]: DEBUG oslo.service.loopingcall [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1741.031462] env[61440]: DEBUG nova.compute.manager [-] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1741.031591] env[61440]: DEBUG nova.network.neutron [-] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1741.044496] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a2e324a-cd30-4ecf-8eea-cfaab3aa43c6 tempest-ServersNegativeTestMultiTenantJSON-63877719 tempest-ServersNegativeTestMultiTenantJSON-63877719-project-member] Lock "ee5f4d65-3264-451a-9e9e-8a7e47b1b527" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.280s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.053659] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.091669] env[61440]: DEBUG nova.network.neutron [-] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.099411] env[61440]: INFO nova.compute.manager [-] [instance: e607fbab-cf85-46c0-81a8-5397fc3b9d2d] Took 0.07 seconds to deallocate network for instance. 
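Two timings in the lock records above tell the story of this instance: _locked_do_build_and_run_instance held the instance lock for 687.763s (the failed build, dominated by the image-copy wait), which is why do_terminate_instance reports having waited 492.148s for the same lock. Below is a hedged, stdlib-only way to surface such contention from a log like this one; the regex matches only the "released ... held N.NNNs" phrasing oslo.concurrency prints here, and the function name is illustrative rather than any oslo API.

```python
# Minimal sketch: find the longest lock holds recorded in a flattened log.
import re
from collections import defaultdict

HELD_RE = re.compile(
    r'Lock "([^"]+)" "released" by "([^"]+)" :: held (\d+\.\d+)s')

def longest_holds(log_text: str, top: int = 5):
    """Return [(lock, holder, max_seconds_held)] sorted longest-first."""
    worst = defaultdict(float)  # (lock, holder) -> max seconds held
    for lock, holder, secs in HELD_RE.findall(log_text):
        worst[(lock, holder)] = max(worst[(lock, holder)], float(secs))
    ranked = sorted(worst.items(), key=lambda kv: kv[1], reverse=True)
    return [(lock, holder, secs) for (lock, holder), secs in ranked[:top]]
```

Run over this section, the top entry would be the 687.763s hold of lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" by _locked_do_build_and_run_instance.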
[ 1741.105330] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.105598] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.106960] env[61440]: INFO nova.compute.claims [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1741.196952] env[61440]: DEBUG oslo_concurrency.lockutils [None req-6b30873f-32ee-45f5-a59b-4436a9076b5a tempest-ImagesOneServerNegativeTestJSON-240382933 tempest-ImagesOneServerNegativeTestJSON-240382933-project-member] Lock "e607fbab-cf85-46c0-81a8-5397fc3b9d2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.241s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.308178] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b163c9-5322-4a1c-a01a-9d0e0dae7a9b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.316135] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a953599-7c5e-45d8-90e2-c3eccb568994 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.345351] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1248283e-566a-4ee4-b1a4-4d8fc7cf7867 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.352417] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f983dce-81fe-42c4-831e-c7d913037fae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.365054] env[61440]: DEBUG nova.compute.provider_tree [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.394531] env[61440]: DEBUG nova.scheduler.client.report [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1741.409090] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.409664] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1741.441033] env[61440]: DEBUG nova.compute.utils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1741.445020] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1741.445020] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1741.451858] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Start building block device mappings for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1741.511996] env[61440]: DEBUG nova.policy [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44bee088109e493e8845873fe373db93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c9b7ca1bb3c449799dccbcebf6d801d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1741.516787] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1741.543259] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1741.543503] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1741.543662] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1741.543847] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1741.543996] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1741.544163] env[61440]: DEBUG 
nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1741.544376] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1741.544566] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1741.544743] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1741.544911] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1741.545619] env[61440]: DEBUG nova.virt.hardware [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1741.546594] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1065ef53-f5b0-4621-914d-d1f6ee8518a2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.554045] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644f2745-430c-4e3b-8bf1-7e85451c196b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.872902] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Successfully created port: dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1742.629315] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Successfully updated port: dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1742.640894] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "refresh_cache-f99f2c72-3158-46db-b21b-7f0066539252" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.643395] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquired lock "refresh_cache-f99f2c72-3158-46db-b21b-7f0066539252" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.643576] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1742.709453] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1742.968357] env[61440]: DEBUG nova.compute.manager [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Received event network-vif-plugged-dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1742.968603] env[61440]: DEBUG oslo_concurrency.lockutils [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] Acquiring lock "f99f2c72-3158-46db-b21b-7f0066539252-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.968774] env[61440]: DEBUG oslo_concurrency.lockutils [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] Lock "f99f2c72-3158-46db-b21b-7f0066539252-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.968911] env[61440]: DEBUG oslo_concurrency.lockutils [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] Lock "f99f2c72-3158-46db-b21b-7f0066539252-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.969090] env[61440]: DEBUG nova.compute.manager [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] No waiting events found dispatching network-vif-plugged-dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1742.969260] env[61440]: WARNING nova.compute.manager [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] 
[instance: f99f2c72-3158-46db-b21b-7f0066539252] Received unexpected event network-vif-plugged-dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 for instance with vm_state building and task_state spawning. [ 1742.969470] env[61440]: DEBUG nova.compute.manager [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Received event network-changed-dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1742.969570] env[61440]: DEBUG nova.compute.manager [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Refreshing instance network info cache due to event network-changed-dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1742.969737] env[61440]: DEBUG oslo_concurrency.lockutils [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] Acquiring lock "refresh_cache-f99f2c72-3158-46db-b21b-7f0066539252" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.120258] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Updating instance_info_cache with network_info: [{"id": "dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24", "address": "fa:16:3e:b7:fa:75", "network": {"id": "c8c48824-feed-4f59-8d24-585f77ea43a9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-101240612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c9b7ca1bb3c449799dccbcebf6d801d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd2e39be-5c", "ovs_interfaceid": "dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.132108] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Releasing lock "refresh_cache-f99f2c72-3158-46db-b21b-7f0066539252" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.132481] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance network_info: |[{"id": "dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24", 
"address": "fa:16:3e:b7:fa:75", "network": {"id": "c8c48824-feed-4f59-8d24-585f77ea43a9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-101240612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c9b7ca1bb3c449799dccbcebf6d801d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd2e39be-5c", "ovs_interfaceid": "dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1743.132820] env[61440]: DEBUG oslo_concurrency.lockutils [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] Acquired lock "refresh_cache-f99f2c72-3158-46db-b21b-7f0066539252" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.133028] env[61440]: DEBUG nova.network.neutron [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Refreshing network info cache for port dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1743.134148] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:fa:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '17c839f5-4de0-449c-9a24-4e0e2fca37ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1743.141663] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Creating folder: Project (0c9b7ca1bb3c449799dccbcebf6d801d). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1743.144835] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc46f978-37e7-4140-b3b7-e3f36201b5aa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.157029] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Created folder: Project (0c9b7ca1bb3c449799dccbcebf6d801d) in parent group-v843372. 
[ 1743.157208] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Creating folder: Instances. Parent ref: group-v843470. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1743.157442] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-099948ba-6f49-4cb7-be90-c888430f75bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.166758] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Created folder: Instances in parent group-v843470. [ 1743.166997] env[61440]: DEBUG oslo.service.loopingcall [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1743.167191] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1743.167393] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6b88199-79a1-4a17-95f5-a73306af286e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.187849] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1743.187849] env[61440]: value = "task-4281381" [ 1743.187849] env[61440]: _type = "Task" [ 1743.187849] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.195592] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281381, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.437819] env[61440]: DEBUG nova.network.neutron [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Updated VIF entry in instance network info cache for port dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1743.438227] env[61440]: DEBUG nova.network.neutron [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Updating instance_info_cache with network_info: [{"id": "dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24", "address": "fa:16:3e:b7:fa:75", "network": {"id": "c8c48824-feed-4f59-8d24-585f77ea43a9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-101240612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c9b7ca1bb3c449799dccbcebf6d801d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "17c839f5-4de0-449c-9a24-4e0e2fca37ca", "external-id": "nsx-vlan-transportzone-198", "segmentation_id": 198, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd2e39be-5c", "ovs_interfaceid": "dd2e39be-5ce8-478f-aeb0-4efbf3fa0d24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.448587] env[61440]: DEBUG oslo_concurrency.lockutils [req-61507333-b889-45ac-a7db-97aecfefc44a req-95c14139-716e-477a-93b4-504db86e8ac0 service nova] Releasing lock "refresh_cache-f99f2c72-3158-46db-b21b-7f0066539252" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.697547] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281381, 'name': CreateVM_Task, 'duration_secs': 0.2827} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.697827] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1743.698457] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.698647] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.698971] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1743.699238] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb78770c-b1df-4809-a49c-af7ff7992246 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.703648] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Waiting for the task: (returnval){ [ 1743.703648] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526de203-9dc4-23fc-030f-175a9c412cd3" [ 1743.703648] env[61440]: _type = "Task" [ 1743.703648] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.711496] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]526de203-9dc4-23fc-030f-175a9c412cd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.213586] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.213840] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1744.214070] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.534023] env[61440]: DEBUG oslo_concurrency.lockutils [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "f99f2c72-3158-46db-b21b-7f0066539252" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.444082] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.444416] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.283702] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.274261] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.274437] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1769.274765] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1769.275188] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1769.275188] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1769.297599] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.297783] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.297849] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.297990] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298153] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298245] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298365] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298511] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298594] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298719] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1769.298841] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1770.274017] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.274801] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.274671] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.274928] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.287153] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.287381] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.287547] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.287718] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1773.288821] env[61440]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e080e85-cbed-496c-abf0-f674ac72d573 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.297922] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9f6d23-c71b-4bdd-ab77-60b3fc43d667 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.311515] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9195485c-81f4-4b9b-ac40-c01eede6bac3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.317420] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0772e080-e159-47c3-8f07-1e2fe2f0f263 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.346556] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180675MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1773.346683] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.346862] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.416718] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 269e724a-100e-4112-9c06-8a36871538ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.416869] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417009] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417148] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417270] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417389] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417508] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417625] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417738] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.417851] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.428313] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 4ab24b15-4808-4a3c-81d3-a1282e633cf8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.437986] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.447207] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance a7e53bc4-8461-411a-9b45-66678b9bb31f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.456555] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.456771] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1773.456916] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1773.625930] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cbb49d-e45a-42be-9dfc-cb43daa46293 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.633470] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfedc65-7fc9-4542-9b16-57c99172461e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.662296] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb5150a-3a51-48c4-90ce-f0732aa58ce5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.669461] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a76bd3e-48b1-43f8-a5ab-364b5a21c5c3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.683211] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 
9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.692488] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1773.706767] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1773.706935] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.360s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.993106] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_power_states {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.015686] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Getting list of instances from cluster (obj){ [ 1774.015686] env[61440]: value = "domain-c8" [ 1774.015686] env[61440]: _type = "ClusterComputeResource" [ 1774.015686] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1774.017743] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ce94ca-2f35-43d8-88f9-a17b59a57040 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.042046] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Got total of 10 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1774.042216] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 269e724a-100e-4112-9c06-8a36871538ac {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.042409] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 608ac5c2-3518-4da0-992f-a752584165a7 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.042572] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 2486ea17-09bd-410d-a96d-bc863c3354e2 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.042727] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid ccdd9481-6f4b-4a84-9f05-a4709b6615d9 
{{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.042888] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.043049] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid c307f560-e474-441f-b099-53c2fd290488 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.043213] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 53a5db32-d312-488e-8193-df4504736fc7 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.043364] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 9f2d4b43-f7ef-401b-a63d-844e113b7142 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.043512] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.043659] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid f99f2c72-3158-46db-b21b-7f0066539252 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1774.043976] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "269e724a-100e-4112-9c06-8a36871538ac" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.044236] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "608ac5c2-3518-4da0-992f-a752584165a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.044432] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "2486ea17-09bd-410d-a96d-bc863c3354e2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.044629] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.044821] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.045085] env[61440]: DEBUG 
oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "c307f560-e474-441f-b099-53c2fd290488" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.045311] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "53a5db32-d312-488e-8193-df4504736fc7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.045505] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.045691] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.045877] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "f99f2c72-3158-46db-b21b-7f0066539252" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.327685] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.270854] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.521970] env[61440]: WARNING oslo_vmware.rw_handles [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1788.521970] 
env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1788.521970] env[61440]: ERROR oslo_vmware.rw_handles [ 1788.521970] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1788.523501] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1788.523890] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Copying Virtual Disk [datastore2] vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/fa508c58-8e3d-44f4-8a8c-21c799569287/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1788.524220] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0542a49-8951-4ec3-adc8-927deaabb408 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.532821] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Waiting for the task: (returnval){ [ 1788.532821] env[61440]: value = "task-4281382" [ 1788.532821] env[61440]: _type = "Task" [ 1788.532821] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.540966] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Task: {'id': task-4281382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.709693] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "abbd5d5b-7821-435d-ac56-0d070ff08043" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.709926] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.042698] env[61440]: DEBUG oslo_vmware.exceptions [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1789.042978] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.043555] env[61440]: ERROR nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1789.043555] env[61440]: Faults: ['InvalidArgument'] [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] Traceback (most recent call last): [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] yield resources [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self.driver.spawn(context, instance, image_meta, [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 
1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self._fetch_image_if_missing(context, vi) [ 1789.043555] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] image_cache(vi, tmp_image_ds_loc) [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] vm_util.copy_virtual_disk( [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] session._wait_for_task(vmdk_copy_task) [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] return self.wait_for_task(task_ref) [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] return evt.wait() [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] result = hub.switch() [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1789.043947] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] return self.greenlet.switch() [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self.f(*self.args, **self.kw) [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] raise exceptions.translate_fault(task_info.error) [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] Faults: ['InvalidArgument'] [ 1789.044435] env[61440]: ERROR nova.compute.manager [instance: 
269e724a-100e-4112-9c06-8a36871538ac] [ 1789.044435] env[61440]: INFO nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Terminating instance [ 1789.045448] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.045653] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.046284] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1789.046498] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1789.046722] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f2d923b-a913-4c2b-84a9-60c99b153389 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.048884] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b757ed-4f04-4b96-8092-90d33fc36a0b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.055337] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1789.055547] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23861280-b9d5-4247-9aa6-2a6192fe767a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.057654] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.057845] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1789.058813] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a1a3e4-6b9d-44b2-b3e6-535283f0bcd6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.063345] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 1789.063345] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]523bd274-67d5-e48b-f1bd-4aa4d5149769" [ 1789.063345] env[61440]: _type = "Task" [ 1789.063345] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.070031] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]523bd274-67d5-e48b-f1bd-4aa4d5149769, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.128133] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1789.128339] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1789.128510] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Deleting the datastore file [datastore2] 269e724a-100e-4112-9c06-8a36871538ac {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1789.128761] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bf172f6-cf98-4c80-8577-3d5f0ac799e7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.135365] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Waiting for the task: (returnval){ [ 1789.135365] env[61440]: value = "task-4281384" [ 1789.135365] env[61440]: _type = "Task" [ 1789.135365] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.143512] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Task: {'id': task-4281384, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.575756] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1789.575756] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating directory with path [datastore2] vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.575756] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-528d56a3-0920-472a-b09f-4931cbd4cd9c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.593287] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Created directory with path [datastore2] vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.594105] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Fetch image to [datastore2] vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1789.594105] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1789.594537] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90db6d35-f1b8-42e7-9906-72735770fadc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.601457] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b242101d-c79a-416c-81db-5211fd2e319b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.611090] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb61050-34d0-443a-b86d-829b6c12a8fb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.644467] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ddc20b75-53c5-43ea-82a6-1247c55592a7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.653592] env[61440]: DEBUG oslo_vmware.api [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Task: {'id': task-4281384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080697} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.653795] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f29c527-ad3e-40f1-96d0-e6fc07b177ba {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.655626] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.655815] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1789.655991] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1789.656197] env[61440]: INFO nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Took 0.61 seconds to destroy the instance on the hypervisor. 
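
The "Waiting for the task ... to complete" blocks and the "progress is 0%" / "completed successfully" updates above come from oslo.vmware's task polling (wait_for_task in oslo_vmware/api.py, per the {{...}} markers). A minimal sketch of the same call pattern follows; the vCenter endpoint, credentials and datastore path are illustrative placeholders, not values from this log, and a reachable vCenter is needed for it to actually run:

    from oslo_vmware import api

    # Illustrative endpoint/credentials (assumption, not from this log).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Asynchronous vCenter methods return a Task managed-object reference.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore2] some/file.vmdk',  # placeholder
                              datacenter=None)  # datacenter ref omitted here

    # wait_for_task() polls the task (the "progress is N%" DEBUG lines above)
    # and raises a translated fault on error -- the same path that turned the
    # earlier CopyVirtualDisk_Task failure into VimFaultException with
    # Faults: ['InvalidArgument'].
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once vCenter reports completion
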
[ 1789.658743] env[61440]: DEBUG nova.compute.claims [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1789.658932] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.659162] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.676946] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1789.838578] env[61440]: DEBUG oslo_vmware.rw_handles [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1789.898306] env[61440]: DEBUG oslo_vmware.rw_handles [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1789.898439] env[61440]: DEBUG oslo_vmware.rw_handles [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1789.960978] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633f3fee-abf4-4fd4-ab15-b90cdb74d46a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.968551] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425258d7-6db0-4d7e-8553-cb6258e1284d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.997747] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b122e6-102d-4353-847f-e8d89897ac1c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.004123] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ceb5cb-6ecd-4994-accb-4fe6e95131df {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.016414] env[61440]: DEBUG nova.compute.provider_tree [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.026233] env[61440]: DEBUG nova.scheduler.client.report [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.042635] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.043157] env[61440]: ERROR nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1790.043157] env[61440]: Faults: ['InvalidArgument'] [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] Traceback (most recent call last): [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1790.043157] env[61440]: ERROR nova.compute.manager 
[instance: 269e724a-100e-4112-9c06-8a36871538ac] self.driver.spawn(context, instance, image_meta, [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self._fetch_image_if_missing(context, vi) [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] image_cache(vi, tmp_image_ds_loc) [ 1790.043157] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] vm_util.copy_virtual_disk( [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] session._wait_for_task(vmdk_copy_task) [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] return self.wait_for_task(task_ref) [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] return evt.wait() [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] result = hub.switch() [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] return self.greenlet.switch() [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1790.043651] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] self.f(*self.args, **self.kw) [ 1790.044078] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1790.044078] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] raise exceptions.translate_fault(task_info.error) [ 1790.044078] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1790.044078] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] Faults: ['InvalidArgument'] [ 1790.044078] env[61440]: ERROR nova.compute.manager [instance: 269e724a-100e-4112-9c06-8a36871538ac] [ 1790.044078] env[61440]: DEBUG nova.compute.utils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1790.045423] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Build of instance 269e724a-100e-4112-9c06-8a36871538ac was re-scheduled: A specified parameter was not correct: fileType [ 1790.045423] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1790.045797] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1790.045966] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1790.046161] env[61440]: DEBUG nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1790.046356] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1790.376246] env[61440]: DEBUG nova.network.neutron [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.397714] env[61440]: INFO nova.compute.manager [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Took 0.35 seconds to deallocate network for instance. [ 1790.500767] env[61440]: INFO nova.scheduler.client.report [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Deleted allocations for instance 269e724a-100e-4112-9c06-8a36871538ac [ 1790.519608] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2a3b52cb-cda2-4db3-873d-ea12db14cce1 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "269e724a-100e-4112-9c06-8a36871538ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 688.330s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.520660] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "269e724a-100e-4112-9c06-8a36871538ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 492.508s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.520887] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Acquiring lock "269e724a-100e-4112-9c06-8a36871538ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.521105] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "269e724a-100e-4112-9c06-8a36871538ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.521276] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "269e724a-100e-4112-9c06-8a36871538ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.523225] env[61440]: INFO nova.compute.manager [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Terminating instance [ 1790.524834] env[61440]: DEBUG nova.compute.manager [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1790.525037] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1790.525826] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66ebefce-fc63-4e0d-9f19-29309a931034 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.534613] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab002a0-a5dd-4277-bdb2-47fb12ce7cd6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.545115] env[61440]: DEBUG nova.compute.manager [None req-38ce6206-58c6-4870-8684-4eef8ad8f51a tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 4ab24b15-4808-4a3c-81d3-a1282e633cf8] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1790.567359] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 269e724a-100e-4112-9c06-8a36871538ac could not be found. [ 1790.567680] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1790.567920] env[61440]: INFO nova.compute.manager [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Took 0.04 seconds to destroy the instance on the hypervisor. 
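
The lock bookkeeping above ("acquired ... waited 492.508s", "released ... held 688.330s") is all emitted by one helper, oslo_concurrency.lockutils: build, terminate and power-state sync each serialize on the instance UUID. A minimal sketch of the same pattern, assuming only that oslo.concurrency is installed; the function body is a placeholder:

    from oslo_concurrency import lockutils

    # Decorator form: every code path touching this instance serializes on
    # one named lock, which is why do_terminate_instance above waited
    # 492.508s while _locked_do_build_and_run_instance held it for 688.330s.
    @lockutils.synchronized('269e724a-100e-4112-9c06-8a36871538ac')
    def do_terminate_instance():
        pass  # teardown would run here, with the lock held

    do_terminate_instance()

    # Context-manager form, as used for the nested per-instance "-events" lock:
    with lockutils.lock('269e724a-100e-4112-9c06-8a36871538ac-events'):
        pass  # clear pending events atomically
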
[ 1790.568215] env[61440]: DEBUG oslo.service.loopingcall [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.568474] env[61440]: DEBUG nova.compute.manager [-] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1790.568614] env[61440]: DEBUG nova.network.neutron [-] [instance: 269e724a-100e-4112-9c06-8a36871538ac] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1790.570948] env[61440]: DEBUG nova.compute.manager [None req-38ce6206-58c6-4870-8684-4eef8ad8f51a tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 4ab24b15-4808-4a3c-81d3-a1282e633cf8] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1790.591829] env[61440]: DEBUG oslo_concurrency.lockutils [None req-38ce6206-58c6-4870-8684-4eef8ad8f51a tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "4ab24b15-4808-4a3c-81d3-a1282e633cf8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.429s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.595889] env[61440]: DEBUG nova.network.neutron [-] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.603755] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1790.606381] env[61440]: INFO nova.compute.manager [-] [instance: 269e724a-100e-4112-9c06-8a36871538ac] Took 0.04 seconds to deallocate network for instance. 
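The oslo.service record above shows the compute manager blocking on a looping call until _deallocate_network_with_retries returns. Below is a minimal stand-in for that retry-until-success shape, assuming a fixed attempt count and delay for illustration; Nova's real wrapper lives in compute/manager.py and uses oslo.service machinery rather than this bare loop.

import time

def deallocate_network_with_retries(deallocate, attempts=3, delay=1.0):
    # Keep calling the deallocation function until it succeeds or the attempt
    # budget is exhausted; the caller blocks on the result, which is why the
    # log shows "Waiting for function ... to return."
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)

deallocate_network_with_retries(lambda: print("deallocate_for_instance()"))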
[ 1790.658436] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.658718] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.660219] env[61440]: INFO nova.compute.claims [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1790.700026] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3a3313c1-3099-4391-b595-52f0a4180f87 tempest-ServerTagsTestJSON-1767375371 tempest-ServerTagsTestJSON-1767375371-project-member] Lock "269e724a-100e-4112-9c06-8a36871538ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.701124] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "269e724a-100e-4112-9c06-8a36871538ac" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.657s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.701441] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 269e724a-100e-4112-9c06-8a36871538ac] During sync_power_state the instance has a pending task (deleting). Skip. 
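The lockutils records in this section report "waited"/"held" timings around every acquire/release on a per-instance-UUID lock: the build lock on 269e724a-100e-4112-9c06-8a36871538ac was held 688.330s while terminate waited 492.508s for it, and here terminate released it after 0.179s with the power-state sync having waited 16.657s. The following is a toy re-creation of that accounting using a plain threading.Lock, purely to show where the numbers come from; oslo.concurrency's real implementation adds fair queuing, external file locks, semaphores and more.

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def locked(name):
    # One lock per name (here, per instance UUID), with the same
    # waited/held bookkeeping the records above report.
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired :: waited %.3fs' % (name, acquired - start))
    try:
        yield
    finally:
        held = time.monotonic() - acquired
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))

with locked("269e724a-100e-4112-9c06-8a36871538ac"):
    pass  # build / terminate / power-state sync would run here, serialized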
[ 1790.701628] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "269e724a-100e-4112-9c06-8a36871538ac" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.893103] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17db3dd-348e-4d7f-aa46-2b3ec105e483 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.900862] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d974ba-8d70-4b16-9eef-60a3c059d238 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.931252] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7666108f-1bdc-47f4-91e9-583d09a3f9b3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.938439] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd616236-2c41-4947-a468-580cec90e21a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.951104] env[61440]: DEBUG nova.compute.provider_tree [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.960634] env[61440]: DEBUG nova.scheduler.client.report [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.976353] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.976878] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Start building networks asynchronously for instance. 
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1791.014017] env[61440]: DEBUG nova.compute.utils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1791.015780] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1791.015780] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1791.027144] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1791.071810] env[61440]: DEBUG nova.policy [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8be059e7fab4a84b58f00f1490fdb41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4eab358aa42d42659e93d2ead48ed0a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1791.089629] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1791.115822] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.116089] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.116259] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.116441] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.116587] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.116734] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.116938] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.117113] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.117281] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 
tempest-ServersTestJSON-678514262-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.117443] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.117619] env[61440]: DEBUG nova.virt.hardware [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.118490] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0d8709-1a5a-41d7-b20c-9a24b33f385e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.126486] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bff1d4-185f-45ae-8b77-407b5814ad21 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.394492] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Successfully created port: 9cbe9231-e86a-4ee3-88d3-33be645dfd17 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1792.079098] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Successfully updated port: 9cbe9231-e86a-4ee3-88d3-33be645dfd17 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1792.092458] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "refresh_cache-f5547fbd-9d74-4217-bba3-3747b3f3c9b2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.092605] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "refresh_cache-f5547fbd-9d74-4217-bba3-3747b3f3c9b2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.093146] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1792.151434] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1792.354282] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Updating instance_info_cache with network_info: [{"id": "9cbe9231-e86a-4ee3-88d3-33be645dfd17", "address": "fa:16:3e:7e:7e:1e", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbe9231-e8", "ovs_interfaceid": "9cbe9231-e86a-4ee3-88d3-33be645dfd17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.366657] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "refresh_cache-f5547fbd-9d74-4217-bba3-3747b3f3c9b2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.366941] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance network_info: |[{"id": "9cbe9231-e86a-4ee3-88d3-33be645dfd17", "address": "fa:16:3e:7e:7e:1e", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbe9231-e8", "ovs_interfaceid": "9cbe9231-e86a-4ee3-88d3-33be645dfd17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1792.367346] env[61440]: 
DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:7e:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cbe9231-e86a-4ee3-88d3-33be645dfd17', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1792.374804] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Creating folder: Project (4eab358aa42d42659e93d2ead48ed0a9). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1792.375340] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cefc377-ce19-463e-bf57-005cb3d335e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.387450] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Created folder: Project (4eab358aa42d42659e93d2ead48ed0a9) in parent group-v843372. [ 1792.387450] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Creating folder: Instances. Parent ref: group-v843473. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1792.387605] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cff2f7a-1864-40b9-bac3-ede7d3a1635b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.396299] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Created folder: Instances in parent group-v843473. [ 1792.396520] env[61440]: DEBUG oslo.service.loopingcall [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1792.396688] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1792.396867] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2f4d360-c97e-4e13-b4d6-213a577ca29c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.416260] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1792.416260] env[61440]: value = "task-4281387" [ 1792.416260] env[61440]: _type = "Task" [ 1792.416260] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.423509] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281387, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.446905] env[61440]: DEBUG nova.compute.manager [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Received event network-vif-plugged-9cbe9231-e86a-4ee3-88d3-33be645dfd17 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1792.447136] env[61440]: DEBUG oslo_concurrency.lockutils [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] Acquiring lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.447362] env[61440]: DEBUG oslo_concurrency.lockutils [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.447617] env[61440]: DEBUG oslo_concurrency.lockutils [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.447806] env[61440]: DEBUG nova.compute.manager [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] No waiting events found dispatching network-vif-plugged-9cbe9231-e86a-4ee3-88d3-33be645dfd17 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1792.447973] env[61440]: WARNING nova.compute.manager [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Received unexpected event network-vif-plugged-9cbe9231-e86a-4ee3-88d3-33be645dfd17 for instance with vm_state building and task_state spawning. [ 1792.448147] env[61440]: DEBUG nova.compute.manager [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Received event network-changed-9cbe9231-e86a-4ee3-88d3-33be645dfd17 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1792.448305] env[61440]: DEBUG nova.compute.manager [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Refreshing instance network info cache due to event network-changed-9cbe9231-e86a-4ee3-88d3-33be645dfd17. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1792.448520] env[61440]: DEBUG oslo_concurrency.lockutils [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] Acquiring lock "refresh_cache-f5547fbd-9d74-4217-bba3-3747b3f3c9b2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.448655] env[61440]: DEBUG oslo_concurrency.lockutils [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] Acquired lock "refresh_cache-f5547fbd-9d74-4217-bba3-3747b3f3c9b2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.448810] env[61440]: DEBUG nova.network.neutron [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Refreshing network info cache for port 9cbe9231-e86a-4ee3-88d3-33be645dfd17 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1792.703672] env[61440]: DEBUG nova.network.neutron [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Updated VIF entry in instance network info cache for port 9cbe9231-e86a-4ee3-88d3-33be645dfd17. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1792.704051] env[61440]: DEBUG nova.network.neutron [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Updating instance_info_cache with network_info: [{"id": "9cbe9231-e86a-4ee3-88d3-33be645dfd17", "address": "fa:16:3e:7e:7e:1e", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbe9231-e8", "ovs_interfaceid": "9cbe9231-e86a-4ee3-88d3-33be645dfd17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.713351] env[61440]: DEBUG oslo_concurrency.lockutils [req-398c3092-3d27-4509-9131-973568c9d082 req-20286905-a273-4e47-b586-3bcb4fcb958c service nova] Releasing lock "refresh_cache-f5547fbd-9d74-4217-bba3-3747b3f3c9b2" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.926948] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281387, 'name': CreateVM_Task, 'duration_secs': 0.296144} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.927139] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1792.927810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.927982] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.928324] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.928582] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c18c03e-3b29-4376-a629-1093e5235607 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.932851] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for the task: (returnval){ [ 1792.932851] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52cf4d44-c88d-9ed5-f40d-12d031ffb181" [ 1792.932851] env[61440]: _type = "Task" [ 1792.932851] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.941116] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52cf4d44-c88d-9ed5-f40d-12d031ffb181, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.443630] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.444143] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1793.444143] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.675029] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.675368] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.076707] env[61440]: DEBUG oslo_concurrency.lockutils [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.277063] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.277381] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1829.274660] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.274882] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1829.274993] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1829.302028] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302459] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302459] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302459] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302723] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302723] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302827] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.302889] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.303015] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.303134] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1829.303254] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1829.303734] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.274598] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.274857] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.273882] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.273940] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.285888] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.286152] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.286330] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.286494] env[61440]: DEBUG 
nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1835.287998] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb7703a-5a51-4686-a630-2102e08ad577 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.296818] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382305fa-b38d-451e-acd4-efda15404447 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.311647] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cef06e7-9e5c-4911-89bb-454b5cc37fbc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.317702] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72edc9a-3855-45fa-9854-e6565d5c4eef {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.345423] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180648MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1835.345562] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.345740] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.426594] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 608ac5c2-3518-4da0-992f-a752584165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.426818] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.426992] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.427162] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.427329] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.427687] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.427863] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428034] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428238] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428353] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.439168] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1835.451922] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1835.460365] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1835.460579] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1835.460723] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1835.610691] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977af2cd-39fb-4988-bae8-e1e08876e631 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.617971] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f460137-d867-476e-ba46-a6e1f8274bfc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.648196] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f04f641-b62b-487e-8804-175e39d5dd8f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.655329] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd51b978-133a-4b5e-899a-0280ee8858f3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.668127] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 
9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.677109] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1835.691585] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1835.691788] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.346s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.692335] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.270480] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.358473] env[61440]: WARNING oslo_vmware.rw_handles [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1839.358473] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1839.358473] env[61440]: ERROR 
oslo_vmware.rw_handles [ 1839.359272] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1839.361579] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1839.361850] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Copying Virtual Disk [datastore2] vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/2b7c3194-94b2-45e1-a529-901ad83c9756/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1839.362175] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c6c91d0-1020-4cc0-b902-453d8eebd4de {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.370913] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 1839.370913] env[61440]: value = "task-4281388" [ 1839.370913] env[61440]: _type = "Task" [ 1839.370913] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.378636] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': task-4281388, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.881233] env[61440]: DEBUG oslo_vmware.exceptions [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1839.881526] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.882092] env[61440]: ERROR nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1839.882092] env[61440]: Faults: ['InvalidArgument'] [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Traceback (most recent call last): [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] yield resources [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self.driver.spawn(context, instance, image_meta, [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self._fetch_image_if_missing(context, vi) [ 1839.882092] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] image_cache(vi, tmp_image_ds_loc) [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] vm_util.copy_virtual_disk( [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] session._wait_for_task(vmdk_copy_task) [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] return self.wait_for_task(task_ref) [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] return evt.wait() [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] result = hub.switch() [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1839.882541] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] return self.greenlet.switch() [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self.f(*self.args, **self.kw) [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] raise exceptions.translate_fault(task_info.error) [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Faults: ['InvalidArgument'] [ 1839.882999] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] [ 1839.882999] env[61440]: INFO nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Terminating instance [ 1839.883962] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.884187] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.884429] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-18127e62-410a-48fa-be67-b5e1a424106a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.886747] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1839.886941] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1839.887705] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1091a019-4a53-4d9e-8440-182844187106 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.894295] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1839.894515] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc95cc90-ca3a-4a91-b7ce-8f336193c87b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.897043] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.897043] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1839.897827] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e10bfea-ce5a-46ea-b099-3024afca65d2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.902240] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1839.902240] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f3e59c-8f8c-7005-bec3-4f68264bec1b" [ 1839.902240] env[61440]: _type = "Task" [ 1839.902240] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.911249] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52f3e59c-8f8c-7005-bec3-4f68264bec1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.960193] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1839.960387] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1839.960607] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Deleting the datastore file [datastore2] 608ac5c2-3518-4da0-992f-a752584165a7 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1839.960814] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91a0c0b2-10b6-484a-a2c9-8bb2b588c32e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.967275] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for the task: (returnval){ [ 1839.967275] env[61440]: value = "task-4281390" [ 1839.967275] env[61440]: _type = "Task" [ 1839.967275] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.974450] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': task-4281390, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.412507] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1840.412847] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating directory with path [datastore2] vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1840.413133] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26d9ec9e-5cb2-49dc-9c5e-d2a52c1d3db6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.424668] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created directory with path [datastore2] vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1840.424900] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Fetch image to [datastore2] vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1840.425069] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1840.425839] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be843dd2-f51b-4830-a5cf-fe72a63d8e14 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.432641] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3974c3f9-9d4a-4497-ad65-ca006e2517dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.441721] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3204e724-6299-420d-89b5-97452db2f99c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.476712] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f73c372-2ca4-4527-ab70-0bae8f2bf7db {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.484880] env[61440]: DEBUG oslo_vmware.api [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Task: {'id': task-4281390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07717} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.485086] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f501cf6e-c631-4360-8336-3be6cf2318d3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.486728] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.486894] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1840.487087] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1840.487303] env[61440]: INFO nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Took 0.60 seconds to destroy the instance on the hypervisor. 
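
The teardown just logged is the driver's standard two-step: unregister the VM from vCenter inventory, then delete its datastore directory and poll the returned task. A minimal sketch of that flow, assuming a `session` object with the `_call_method`/`_wait_for_task` helpers that Nova's VMwareAPISession provides (the function name, refs, and path argument here are illustrative):

    def destroy_on_hypervisor(session, vm_ref, dc_ref, ds_path):
        # UnregisterVM removes the VM from vCenter inventory without
        # touching its files ("Unregistered the VM" above).
        session._call_method(session.vim, "UnregisterVM", vm_ref)
        # Deleting the instance directory is asynchronous: vCenter hands
        # back a Task that must be polled to completion, which is what
        # the DeleteDatastoreFile_Task / "progress is 0%" lines show.
        file_manager = session.vim.service_content.fileManager
        delete_task = session._call_method(
            session.vim, "DeleteDatastoreFile_Task", file_manager,
            name=ds_path, datacenter=dc_ref)
        session._wait_for_task(delete_task)

On failure, waiting on the task raises the translated fault instead of returning, which is exactly how the InvalidArgument fault from CopyVirtualDisk_Task surfaced in the traceback above.
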
[ 1840.489348] env[61440]: DEBUG nova.compute.claims [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1840.489518] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.489733] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.507179] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1840.560777] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1840.619252] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1840.619446] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1840.733639] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7974be-8608-43e6-a8d0-7274cfdfee6f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.741080] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9a8ef9-195c-4200-a692-8d189af48002 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.772893] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0092ab-5ae8-4591-bea0-8810861f4dc9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.780087] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f2884d-4dd5-40ca-8a94-d8164b65673f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.794580] env[61440]: DEBUG nova.compute.provider_tree [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1840.803491] env[61440]: DEBUG nova.scheduler.client.report [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1840.818924] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.329s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.819472] env[61440]: ERROR nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1840.819472] env[61440]: Faults: ['InvalidArgument'] [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Traceback (most recent call last): [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1840.819472] 
env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self.driver.spawn(context, instance, image_meta, [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self._fetch_image_if_missing(context, vi) [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] image_cache(vi, tmp_image_ds_loc) [ 1840.819472] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] vm_util.copy_virtual_disk( [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] session._wait_for_task(vmdk_copy_task) [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] return self.wait_for_task(task_ref) [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] return evt.wait() [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] result = hub.switch() [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] return self.greenlet.switch() [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1840.819904] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] self.f(*self.args, **self.kw) [ 1840.820303] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1840.820303] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] raise exceptions.translate_fault(task_info.error) [ 1840.820303] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1840.820303] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Faults: ['InvalidArgument'] [ 1840.820303] env[61440]: ERROR nova.compute.manager [instance: 608ac5c2-3518-4da0-992f-a752584165a7] [ 1840.820303] env[61440]: DEBUG nova.compute.utils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1840.821616] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Build of instance 608ac5c2-3518-4da0-992f-a752584165a7 was re-scheduled: A specified parameter was not correct: fileType [ 1840.821616] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1840.821990] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1840.822184] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1840.822358] env[61440]: DEBUG nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1840.822537] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1841.161949] env[61440]: DEBUG nova.network.neutron [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.176799] env[61440]: INFO nova.compute.manager [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Took 0.35 seconds to deallocate network for instance. [ 1841.276850] env[61440]: INFO nova.scheduler.client.report [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Deleted allocations for instance 608ac5c2-3518-4da0-992f-a752584165a7 [ 1841.297956] env[61440]: DEBUG oslo_concurrency.lockutils [None req-604941da-6e17-4d67-aa79-352c2a7f2857 tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "608ac5c2-3518-4da0-992f-a752584165a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 690.180s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.299778] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "608ac5c2-3518-4da0-992f-a752584165a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 495.016s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.300013] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Acquiring lock "608ac5c2-3518-4da0-992f-a752584165a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.300256] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "608ac5c2-3518-4da0-992f-a752584165a7-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.300463] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "608ac5c2-3518-4da0-992f-a752584165a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.302324] env[61440]: INFO nova.compute.manager [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Terminating instance [ 1841.303957] env[61440]: DEBUG nova.compute.manager [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1841.304166] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1841.304625] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e31d4f8e-11fc-4c62-a2ac-4afe687c1f88 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.309327] env[61440]: DEBUG nova.compute.manager [None req-2595f302-27a8-430d-afc7-77e021c724d2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: a7e53bc4-8461-411a-9b45-66678b9bb31f] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1841.315438] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e409eed-5b7d-4a4f-a2cf-59e1199a8ef5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.333666] env[61440]: DEBUG nova.compute.manager [None req-2595f302-27a8-430d-afc7-77e021c724d2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] [instance: a7e53bc4-8461-411a-9b45-66678b9bb31f] Instance disappeared before build. {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1841.344069] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 608ac5c2-3518-4da0-992f-a752584165a7 could not be found. 
[ 1841.344285] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1841.344438] env[61440]: INFO nova.compute.manager [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1841.344673] env[61440]: DEBUG oslo.service.loopingcall [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.344880] env[61440]: DEBUG nova.compute.manager [-] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1841.344976] env[61440]: DEBUG nova.network.neutron [-] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1841.361399] env[61440]: DEBUG oslo_concurrency.lockutils [None req-2595f302-27a8-430d-afc7-77e021c724d2 tempest-ImagesTestJSON-692158479 tempest-ImagesTestJSON-692158479-project-member] Lock "a7e53bc4-8461-411a-9b45-66678b9bb31f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.211s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.370759] env[61440]: DEBUG nova.network.neutron [-] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.372398] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1841.378110] env[61440]: INFO nova.compute.manager [-] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] Took 0.03 seconds to deallocate network for instance.
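
The Acquiring/acquired/released triples that follow (and recur throughout this log) come from oslo_concurrency.lockutils, which times how long each caller waited for and then held a named internal semaphore. The same pattern in miniature, with a real decorator but an illustrative lock name and function body:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim_resources():
        # Runs with the named lock held; lockutils emits the
        # 'acquired ... waited N s' and 'released ... held N s'
        # DEBUG lines seen above.
        pass
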
[ 1841.421026] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.421316] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.422671] env[61440]: INFO nova.compute.claims [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1841.480010] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7aa80c22-cf2d-4adf-b40a-e59dbdb2a7da tempest-AttachInterfacesTestJSON-1613166396 tempest-AttachInterfacesTestJSON-1613166396-project-member] Lock "608ac5c2-3518-4da0-992f-a752584165a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.180s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.480869] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "608ac5c2-3518-4da0-992f-a752584165a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 67.437s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.481076] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 608ac5c2-3518-4da0-992f-a752584165a7] During sync_power_state the instance has a pending task (deleting). Skip.
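
The instance_claim above succeeds comfortably: placement derives usable capacity from the inventory this log keeps re-reporting as usable = total * allocation_ratio - reserved, and the m1.nano request is tiny by comparison. A back-of-envelope check with the numbers from this log (plain arithmetic illustrating the formula, not placement's actual code):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 329,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    request = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}  # the m1.nano flavor above

    for rc, amount in request.items():
        inv = inventory[rc]
        usable = inv["total"] * inv["allocation_ratio"] - inv["reserved"]
        # VCPU: 1 <= 48 * 4.0 - 0 = 192.0; MEMORY_MB: 128 <= 196078; DISK_GB: 1 <= 329
        assert amount <= usable

Each resource's max_unit also caps a single allocation (16 VCPU here), which the 1-VCPU request clears as well.
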
[ 1841.481254] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "608ac5c2-3518-4da0-992f-a752584165a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.621406] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c0bc9c-fd7a-4dfc-a947-c9d8d03369dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.629369] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f462a3d-683b-4394-bc38-d556d694c0a2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.659059] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764f6ffa-52fd-4793-901f-17722bfd7279 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.665829] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6f12ab-2dc1-4c44-a8f6-854705c350f8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.678657] env[61440]: DEBUG nova.compute.provider_tree [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.687412] env[61440]: DEBUG nova.scheduler.client.report [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1841.699695] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.278s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.700159] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Start building networks asynchronously for instance.
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1841.731528] env[61440]: DEBUG nova.compute.utils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.732816] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1841.732987] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1841.741948] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1841.808363] env[61440]: DEBUG nova.policy [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02aa958bbbdc49ab8d494fe9afc40779', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a38fb630f3e41acbcd97dee8e89aba8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1841.840455] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1841.868145] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=<?>,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T01:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1841.868439] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1841.868638] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1841.868831] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1841.868981] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1841.869154] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1841.869366] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1841.869529] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1841.869695] env[61440]: DEBUG
nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1841.869859] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1841.870048] env[61440]: DEBUG nova.virt.hardware [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1841.870901] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d600c05d-9919-4946-b38a-ccdc36682ec3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.879287] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81832cff-782f-4508-91d7-c7e6b3c68ee5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.187908] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Successfully created port: 5518c973-7402-4c5d-b991-08399a12ab27 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1842.957279] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Successfully updated port: 5518c973-7402-4c5d-b991-08399a12ab27 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1842.968408] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "refresh_cache-debbffae-2f2a-4d8e-9630-b3fd8bb932e4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.968543] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "refresh_cache-debbffae-2f2a-4d8e-9630-b3fd8bb932e4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.968665] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1843.017915] env[61440]: DEBUG nova.network.neutron [None 
req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1843.211323] env[61440]: DEBUG nova.compute.manager [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Received event network-vif-plugged-5518c973-7402-4c5d-b991-08399a12ab27 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1843.211546] env[61440]: DEBUG oslo_concurrency.lockutils [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] Acquiring lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.211752] env[61440]: DEBUG oslo_concurrency.lockutils [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.211921] env[61440]: DEBUG oslo_concurrency.lockutils [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.212397] env[61440]: DEBUG nova.compute.manager [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] No waiting events found dispatching network-vif-plugged-5518c973-7402-4c5d-b991-08399a12ab27 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1843.212624] env[61440]: WARNING nova.compute.manager [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Received unexpected event network-vif-plugged-5518c973-7402-4c5d-b991-08399a12ab27 for instance with vm_state building and task_state spawning. [ 1843.212794] env[61440]: DEBUG nova.compute.manager [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Received event network-changed-5518c973-7402-4c5d-b991-08399a12ab27 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1843.212955] env[61440]: DEBUG nova.compute.manager [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Refreshing instance network info cache due to event network-changed-5518c973-7402-4c5d-b991-08399a12ab27. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1843.213145] env[61440]: DEBUG oslo_concurrency.lockutils [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] Acquiring lock "refresh_cache-debbffae-2f2a-4d8e-9630-b3fd8bb932e4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.285647] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Updating instance_info_cache with network_info: [{"id": "5518c973-7402-4c5d-b991-08399a12ab27", "address": "fa:16:3e:0c:0b:df", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5518c973-74", "ovs_interfaceid": "5518c973-7402-4c5d-b991-08399a12ab27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.301470] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "refresh_cache-debbffae-2f2a-4d8e-9630-b3fd8bb932e4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.301754] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance network_info: |[{"id": "5518c973-7402-4c5d-b991-08399a12ab27", "address": "fa:16:3e:0c:0b:df", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 
189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5518c973-74", "ovs_interfaceid": "5518c973-7402-4c5d-b991-08399a12ab27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1843.302057] env[61440]: DEBUG oslo_concurrency.lockutils [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] Acquired lock "refresh_cache-debbffae-2f2a-4d8e-9630-b3fd8bb932e4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.302241] env[61440]: DEBUG nova.network.neutron [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Refreshing network info cache for port 5518c973-7402-4c5d-b991-08399a12ab27 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1843.303274] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:0b:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5518c973-7402-4c5d-b991-08399a12ab27', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1843.310932] env[61440]: DEBUG oslo.service.loopingcall [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.313927] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1843.314461] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b343b647-c52b-49f3-a6a4-6600c199f3c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.334860] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1843.334860] env[61440]: value = "task-4281391" [ 1843.334860] env[61440]: _type = "Task" [ 1843.334860] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.342722] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281391, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.599347] env[61440]: DEBUG nova.network.neutron [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Updated VIF entry in instance network info cache for port 5518c973-7402-4c5d-b991-08399a12ab27. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1843.599347] env[61440]: DEBUG nova.network.neutron [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Updating instance_info_cache with network_info: [{"id": "5518c973-7402-4c5d-b991-08399a12ab27", "address": "fa:16:3e:0c:0b:df", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5518c973-74", "ovs_interfaceid": "5518c973-7402-4c5d-b991-08399a12ab27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.609736] env[61440]: DEBUG oslo_concurrency.lockutils [req-245220f0-4457-4cca-8f48-aa2d68731630 req-7d29fa3d-d47c-4b11-add6-68bf8241db91 service nova] Releasing lock "refresh_cache-debbffae-2f2a-4d8e-9630-b3fd8bb932e4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.845082] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281391, 'name': CreateVM_Task, 'duration_secs': 0.276018} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.845230] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1843.852548] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.852720] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.853051] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1843.853295] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f0090f-2c99-4f2f-912c-084baefe303d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.858043] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 1843.858043] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ba57b7-4109-b980-7f43-bcd49b814b71" [ 1843.858043] env[61440]: _type = "Task" [ 1843.858043] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.865634] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ba57b7-4109-b980-7f43-bcd49b814b71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.370507] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.370813] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1844.370990] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.111137] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquiring lock "430f38e6-068a-4c50-b27a-24335bf7e3ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.111391] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Lock "430f38e6-068a-4c50-b27a-24335bf7e3ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.269409] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1889.274698] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1889.718254] env[61440]: WARNING oslo_vmware.rw_handles [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles 
self._conn.getresponse() [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1889.718254] env[61440]: ERROR oslo_vmware.rw_handles [ 1889.718750] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1889.721379] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1889.721684] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Copying Virtual Disk [datastore2] vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/9bd96a0d-7d92-4cb7-a3df-7982d509f85a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1889.722039] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0b38947-0936-4713-a1dc-c46765bbae4b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.730935] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1889.730935] env[61440]: value = "task-4281392" [ 1889.730935] env[61440]: _type = "Task" [ 1889.730935] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.738745] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281392, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.241669] env[61440]: DEBUG oslo_vmware.exceptions [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1890.241963] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.242543] env[61440]: ERROR nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1890.242543] env[61440]: Faults: ['InvalidArgument'] [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Traceback (most recent call last): [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] yield resources [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self.driver.spawn(context, instance, image_meta, [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self._fetch_image_if_missing(context, vi) [ 1890.242543] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] image_cache(vi, tmp_image_ds_loc) [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] vm_util.copy_virtual_disk( [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] session._wait_for_task(vmdk_copy_task) [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] return self.wait_for_task(task_ref) [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] return evt.wait() [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] result = hub.switch() [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1890.242826] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] return self.greenlet.switch() [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self.f(*self.args, **self.kw) [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] raise exceptions.translate_fault(task_info.error) [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Faults: ['InvalidArgument'] [ 1890.243122] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] [ 1890.243122] env[61440]: INFO nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Terminating instance [ 1890.244463] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.244678] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 
tempest-ServerActionsTestJSON-1139651446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1890.244934] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46aa8c07-c107-4a30-abd2-8184fc511e3b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.248617] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1890.248806] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1890.249593] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e60960d-3e45-4f19-99aa-768de5513679 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.252874] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1890.253061] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1890.253982] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5506a0d2-d256-4f06-91e1-f86352c69399 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.257983] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1890.258491] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6de1f27b-ee52-4cae-ac33-cfe0343296d1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.261014] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Waiting for the task: (returnval){ [ 1890.261014] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52b88cc0-5201-5c6f-b4e4-98041d4c3e21" [ 1890.261014] env[61440]: _type = "Task" [ 1890.261014] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.267659] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52b88cc0-5201-5c6f-b4e4-98041d4c3e21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.274097] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.274252] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1890.274371] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1890.297166] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.297448] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.297448] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.297588] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.297683] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.297805] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.297915] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.298047] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.298172] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.298289] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1890.298406] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1890.298931] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.299084] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1890.319891] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1890.320124] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1890.320308] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleting the datastore file [datastore2] 2486ea17-09bd-410d-a96d-bc863c3354e2 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.320561] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d93e337-85d6-4c0e-815c-db25368d2d58 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.327259] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1890.327259] env[61440]: value = "task-4281394" [ 1890.327259] env[61440]: _type = 
"Task" [ 1890.327259] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.334670] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281394, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.771436] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1890.771714] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Creating directory with path [datastore2] vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1890.771952] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69bac7e8-cfe2-4442-af99-23858f4209c6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.783019] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Created directory with path [datastore2] vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1890.783243] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Fetch image to [datastore2] vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1890.783420] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1890.784156] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6637b77-05e3-4bea-9380-f78e431e4963 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.790708] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea01ddc-f155-4efc-b020-07f562d77cca {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.801017] 
env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e335ae0f-193b-4bba-9819-66ff8c3a9b73 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.832999] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7cbd73-c7b9-450b-b2f5-833b2b0c5457 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.839492] env[61440]: DEBUG oslo_vmware.api [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065419} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.840869] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1890.841077] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1890.841258] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1890.841433] env[61440]: INFO nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Took 0.59 seconds to destroy the instance on the hypervisor. 
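NOTE: every vSphere operation in this log (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task above) follows the same shape: the SOAP call returns a Task reference immediately, and oslo_vmware's wait_for_task/_poll_task pair then polls the task's info property — the repeated "progress is 0%." lines — until it reports success (logging duration_secs) or raises a translated fault such as the VimFaultException seen earlier. Below is a minimal stdlib-only sketch of that polling loop; get_task_info, TaskFailed, and the info-dict layout are illustrative stand-ins, not the real oslo_vmware API.

import time

class TaskFailed(Exception):
    """Stand-in for a translated vSphere fault (cf. VimFaultException above)."""

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
    # get_task_info(task_ref) is a hypothetical callable standing in for a
    # PropertyCollector read of the Task's "info" property; assume it returns
    # a dict such as {"state": "running", "progress": 0, "error": None}.
    started = time.monotonic()
    deadline = started + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            # cf. "... completed successfully" with a duration_secs field
            info["duration_secs"] = round(time.monotonic() - started, 6)
            return info
        if info["state"] == "error":
            # cf. "raise exceptions.translate_fault(task_info.error)" in the
            # tracebacks above
            raise TaskFailed(info["error"])
        # cf. the repeated "Task: {...} progress is 0%." poll lines
        time.sleep(interval)
    raise TimeoutError(f"{task_ref} did not complete within {timeout}s")

In the log the delay between polls is driven by an eventlet-based looping call (oslo_vmware/api.py:434-448) rather than a plain time.sleep, but the control flow is the same.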
[ 1890.843376] env[61440]: DEBUG nova.compute.claims [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1890.843546] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.843752] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.846214] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b18f5607-f997-4798-be48-bda51293afd3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.867105] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1891.021365] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1891.080060] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1891.080311] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1891.100511] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc7dd8b-41bb-448d-8b02-a2b17323d48b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.108025] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a7bda7-095f-4520-a404-3b985624c917 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.137698] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6406f7fd-7973-4b7c-b152-53b8523a6370 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.144521] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22da4d9c-0c2c-4a69-bfdd-f74316cd6ac8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.157955] env[61440]: DEBUG nova.compute.provider_tree [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.167196] env[61440]: DEBUG nova.scheduler.client.report [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1891.182283] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.338s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.182805] env[61440]: ERROR nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.182805] env[61440]: Faults: ['InvalidArgument'] [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Traceback (most recent call last): [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1891.182805] 
env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self.driver.spawn(context, instance, image_meta, [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self._fetch_image_if_missing(context, vi) [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] image_cache(vi, tmp_image_ds_loc) [ 1891.182805] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] vm_util.copy_virtual_disk( [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] session._wait_for_task(vmdk_copy_task) [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] return self.wait_for_task(task_ref) [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] return evt.wait() [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] result = hub.switch() [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] return self.greenlet.switch() [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1891.183121] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] self.f(*self.args, **self.kw) [ 1891.183406] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1891.183406] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] raise exceptions.translate_fault(task_info.error) [ 1891.183406] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.183406] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Faults: ['InvalidArgument'] [ 1891.183406] env[61440]: ERROR nova.compute.manager [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] [ 1891.183528] env[61440]: DEBUG nova.compute.utils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1891.185011] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Build of instance 2486ea17-09bd-410d-a96d-bc863c3354e2 was re-scheduled: A specified parameter was not correct: fileType [ 1891.185011] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1891.185415] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1891.185591] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1891.185760] env[61440]: DEBUG nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1891.185926] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1891.572657] env[61440]: DEBUG nova.network.neutron [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.585137] env[61440]: INFO nova.compute.manager [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Took 0.40 seconds to deallocate network for instance. [ 1891.702197] env[61440]: INFO nova.scheduler.client.report [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleted allocations for instance 2486ea17-09bd-410d-a96d-bc863c3354e2 [ 1891.720646] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f1f247c4-f338-4db0-84c7-2328a5525a52 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 692.895s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.721568] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 496.376s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.721848] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "2486ea17-09bd-410d-a96d-bc863c3354e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.722105] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.722323] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.724453] env[61440]: INFO nova.compute.manager [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Terminating instance [ 1891.726341] env[61440]: DEBUG nova.compute.manager [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1891.726542] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1891.727238] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a150710-334f-4ae5-8ab0-2b5ae29561a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.736944] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a8aabb-120c-452c-bb54-a308ccbeef85 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.748707] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1891.769480] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2486ea17-09bd-410d-a96d-bc863c3354e2 could not be found. 
[ 1891.769731] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1891.769984] env[61440]: INFO nova.compute.manager [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1891.770337] env[61440]: DEBUG oslo.service.loopingcall [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.770627] env[61440]: DEBUG nova.compute.manager [-] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1891.770757] env[61440]: DEBUG nova.network.neutron [-] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1891.794853] env[61440]: DEBUG nova.network.neutron [-] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.800742] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.801049] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.802943] env[61440]: INFO nova.compute.claims [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1891.806208] env[61440]: INFO nova.compute.manager [-] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] Took 0.04 seconds to deallocate network for instance. 
[ 1891.946152] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5b30b251-5fb1-457b-b2d5-a4057d373804 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.224s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.947396] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 117.903s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.947500] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2486ea17-09bd-410d-a96d-bc863c3354e2] During sync_power_state the instance has a pending task (deleting). Skip. [ 1891.947699] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "2486ea17-09bd-410d-a96d-bc863c3354e2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.032030] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cde5517-5464-44cb-88bc-3b98c81faeec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.040010] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e342e026-7f6d-4483-a500-9a351f2f622d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.068402] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1fc6c5-dedf-4ac0-9566-5fa1fd44085d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.074884] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f343a0e-4168-478d-805f-6ce6219a23ae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.087290] env[61440]: DEBUG nova.compute.provider_tree [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.096386] env[61440]: DEBUG nova.scheduler.client.report [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 
'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.109605] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.309s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.110076] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1892.142164] env[61440]: DEBUG nova.compute.utils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1892.143516] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1892.143686] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1892.152607] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1892.207820] env[61440]: DEBUG nova.policy [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '719d773060694d48aacfb9fe21f9c8ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebf7143ce68b47bfb93e66b2aa5cc890', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 1892.217641] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1892.242297] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1892.242542] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1892.242699] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1892.242879] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1892.243037] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1892.243192] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1892.243401] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1892.243561] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1892.243724] 
env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1892.243883] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1892.244087] env[61440]: DEBUG nova.virt.hardware [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1892.244929] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebb1c0b-aa03-4d56-b80d-fb879cea9f22 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.252757] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c47126-cd70-4750-b0ee-5fe4d3df3167 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.504768] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Successfully created port: 0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1893.219103] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Successfully updated port: 0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1893.232101] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "refresh_cache-abbd5d5b-7821-435d-ac56-0d070ff08043" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.232306] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "refresh_cache-abbd5d5b-7821-435d-ac56-0d070ff08043" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.232425] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1893.274650] env[61440]: DEBUG 
oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.275945] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1893.438023] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Updating instance_info_cache with network_info: [{"id": "0b0ab2f7-62f1-4e10-bc25-b5878fa34f63", "address": "fa:16:3e:c0:a9:2f", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b0ab2f7-62", "ovs_interfaceid": "0b0ab2f7-62f1-4e10-bc25-b5878fa34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.449665] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "refresh_cache-abbd5d5b-7821-435d-ac56-0d070ff08043" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.449952] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance network_info: |[{"id": "0b0ab2f7-62f1-4e10-bc25-b5878fa34f63", "address": "fa:16:3e:c0:a9:2f", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b0ab2f7-62", "ovs_interfaceid": "0b0ab2f7-62f1-4e10-bc25-b5878fa34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1893.450394] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:a9:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b0ab2f7-62f1-4e10-bc25-b5878fa34f63', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1893.458448] env[61440]: DEBUG oslo.service.loopingcall [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.458912] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1893.459156] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15737aa9-58e5-4cbf-930f-19f28f74f3b6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.479653] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1893.479653] env[61440]: value = "task-4281395" [ 1893.479653] env[61440]: _type = "Task" [ 1893.479653] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.487492] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281395, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.615944] env[61440]: DEBUG nova.compute.manager [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Received event network-vif-plugged-0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1893.616191] env[61440]: DEBUG oslo_concurrency.lockutils [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] Acquiring lock "abbd5d5b-7821-435d-ac56-0d070ff08043-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.617708] env[61440]: DEBUG oslo_concurrency.lockutils [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.617708] env[61440]: DEBUG oslo_concurrency.lockutils [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.617708] env[61440]: DEBUG nova.compute.manager [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] No waiting events found dispatching network-vif-plugged-0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1893.617708] env[61440]: WARNING nova.compute.manager [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Received unexpected event network-vif-plugged-0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 for instance with vm_state building and task_state spawning. [ 1893.618441] env[61440]: DEBUG nova.compute.manager [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Received event network-changed-0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1893.618662] env[61440]: DEBUG nova.compute.manager [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Refreshing instance network info cache due to event network-changed-0b0ab2f7-62f1-4e10-bc25-b5878fa34f63. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1893.618858] env[61440]: DEBUG oslo_concurrency.lockutils [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] Acquiring lock "refresh_cache-abbd5d5b-7821-435d-ac56-0d070ff08043" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.618994] env[61440]: DEBUG oslo_concurrency.lockutils [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] Acquired lock "refresh_cache-abbd5d5b-7821-435d-ac56-0d070ff08043" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.619165] env[61440]: DEBUG nova.network.neutron [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Refreshing network info cache for port 0b0ab2f7-62f1-4e10-bc25-b5878fa34f63 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1893.908086] env[61440]: DEBUG nova.network.neutron [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Updated VIF entry in instance network info cache for port 0b0ab2f7-62f1-4e10-bc25-b5878fa34f63. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1893.908480] env[61440]: DEBUG nova.network.neutron [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Updating instance_info_cache with network_info: [{"id": "0b0ab2f7-62f1-4e10-bc25-b5878fa34f63", "address": "fa:16:3e:c0:a9:2f", "network": {"id": "083568ec-f839-4973-b087-e6ce512e8f66", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1637859812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebf7143ce68b47bfb93e66b2aa5cc890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b0ab2f7-62", "ovs_interfaceid": "0b0ab2f7-62f1-4e10-bc25-b5878fa34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.920047] env[61440]: DEBUG oslo_concurrency.lockutils [req-a4283376-c7d9-46e4-b86c-ae047d6d3c6b req-c4774669-f335-471f-aa37-fb1056119205 service nova] Releasing lock "refresh_cache-abbd5d5b-7821-435d-ac56-0d070ff08043" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.989637] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281395, 'name': CreateVM_Task, 'duration_secs': 0.34019} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.990030] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1893.990492] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.990659] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.990976] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1893.991239] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-485bbd65-7611-42ef-805e-872abab9fe3d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.996197] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 1893.996197] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]520fe542-cb4e-1e1a-e79d-1a6306b78a8c" [ 1893.996197] env[61440]: _type = "Task" [ 1893.996197] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.012689] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.013211] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1894.013497] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.273699] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.274107] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1896.275053] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1896.275053] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1896.286855] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.287184] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.287366] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.287525] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1896.288681] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4d02e5-0adc-41e4-b866-2f3b949da5bc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.297711] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417001e0-c2b5-415b-ac1f-7c55446d8a2f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.312888] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925d9d14-ebb9-4258-8e4c-0a9f04c7d865 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.318972] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60bde3e-c956-4afa-a2e8-7e2da293a216 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.346486] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180665MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1896.346629] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.346812] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.418727] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.418889] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419030] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419163] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419312] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419433] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419556] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419673] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419768] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.419878] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1896.435723] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1896.446014] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1896.446212] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1896.446392] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1896.586106] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11c5d41-4e1d-4b09-819d-a7586fffba2a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.593583] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4dfe9e-2469-4ed7-a085-8cdeb7d7e383 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.623196] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd71c536-29a3-49b3-b29f-479eaf1cca92 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.629644] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d3890-53f9-4f7e-864d-8183e91231dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.641928] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.650139] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1896.665638] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1896.665810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.319s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.660650] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.737438] env[61440]: WARNING oslo_vmware.rw_handles [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1939.737438] env[61440]: ERROR oslo_vmware.rw_handles [ 1939.738065] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1939.739897] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Caching 
[ 1939.739897] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1939.740232] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Copying Virtual Disk [datastore2] vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/1e55c7d5-dc8c-4fa6-98d7-6eab5d69f5d6/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1939.740637] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f48bc994-af54-42ef-92f5-a4778ef48183 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1939.748725] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Waiting for the task: (returnval){
[ 1939.748725] env[61440]: value = "task-4281396"
[ 1939.748725] env[61440]: _type = "Task"
[ 1939.748725] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1939.756660] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Task: {'id': task-4281396, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1940.259763] env[61440]: DEBUG oslo_vmware.exceptions [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1940.260077] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1940.260653] env[61440]: ERROR nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1940.260653] env[61440]: Faults: ['InvalidArgument']
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Traceback (most recent call last):
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] yield resources
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self.driver.spawn(context, instance, image_meta,
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self._fetch_image_if_missing(context, vi)
[ 1940.260653] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] image_cache(vi, tmp_image_ds_loc)
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] vm_util.copy_virtual_disk(
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] session._wait_for_task(vmdk_copy_task)
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] return self.wait_for_task(task_ref)
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] return evt.wait()
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] result = hub.switch()
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1940.260971] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] return self.greenlet.switch()
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self.f(*self.args, **self.kw)
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] raise exceptions.translate_fault(task_info.error)
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Faults: ['InvalidArgument']
[ 1940.261359] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9]
[ 1940.261359] env[61440]: INFO nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Terminating instance
[ 1940.262563] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1940.262772] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1940.263026] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61a6020a-06ad-4cd2-af00-539bc3ee1209 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.265157] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1940.265350] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1940.266087] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722e5f97-0ec9-4f90-b4e3-a29939e92279 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.272639] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1940.273655] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11aef78b-5b65-4f21-beac-e7c1b54bd319 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.274967] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1940.275155] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1940.275821] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f902f229-4e73-411d-a57f-bd0af1919804 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.280530] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){
[ 1940.280530] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52df49b2-027b-16d8-d023-93919e44a363"
[ 1940.280530] env[61440]: _type = "Task"
[ 1940.280530] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1940.287501] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52df49b2-027b-16d8-d023-93919e44a363, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1940.349644] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1940.349853] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1940.350064] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Deleting the datastore file [datastore2] ccdd9481-6f4b-4a84-9f05-a4709b6615d9 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1940.350347] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2077a526-e8c7-4856-b5de-4388f51f9717 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.356841] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Waiting for the task: (returnval){
[ 1940.356841] env[61440]: value = "task-4281398"
[ 1940.356841] env[61440]: _type = "Task"
[ 1940.356841] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1940.364087] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Task: {'id': task-4281398, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1940.791853] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1940.792202] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating directory with path [datastore2] vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1940.792407] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-381a7b5c-642a-48e5-b800-93d30f41ce77 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.805525] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Created directory with path [datastore2] vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1940.805701] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Fetch image to [datastore2] vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1940.805884] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1940.806642] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513a68b7-8b3d-454a-97e4-67ba2dc2f4a3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.813366] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2a0593-1616-4a5b-861b-fcb1af3882fb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.822132] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45001d0d-0367-4b04-bff6-40c9992197e9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.851454] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1302f26b-7acc-4dd7-ae2c-33f2022a5af8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.856741] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6a947ab1-3cf2-4fa6-8d7a-6c749f13c15e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1940.866092] env[61440]: DEBUG oslo_vmware.api [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Task: {'id': task-4281398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066183} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1940.866325] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1940.866519] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1940.866691] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1940.866865] env[61440]: INFO nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Took 0.60 seconds to destroy the instance on the hypervisor.
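Note: every CopyVirtualDisk_Task / SearchDatastore_Task / DeleteDatastoreFile_Task exchange above follows the same oslo.vmware pattern: invoke the vCenter method to obtain a Task moref, then block on the session's poller until TaskInfo reports success or a fault. A rough sketch of that pattern (the endpoint and credentials are placeholders, and the datastore path and datacenter handling are simplified, so treat this as an outline rather than Nova's exact call):

    from oslo_vmware import api

    # Placeholder endpoint/credentials; real values come from nova.conf [vmware].
    session = api.VMwareAPISession('vc1.example.test', 'user', 'secret',
                                   api_retry_count=2, task_poll_interval=0.5)

    # Start an asynchronous vCenter task (a file delete, as in task-4281398
    # above) and let the session poll it; task errors surface as translated
    # fault exceptions, exactly like the VimFaultException in this section.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore2] ccdd9481-6f4b-4a84-9f05-a4709b6615d9',
                              datacenter=None)
    session.wait_for_task(task)  # emits the "Waiting for the task" / progress lines

The "Fault InvalidArgument not matched" DEBUG line above is oslo.vmware's fault translator failing to find a more specific exception class for InvalidArgument, so it falls back to the generic VimFaultException carrying Faults: ['InvalidArgument'].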
[ 1940.868954] env[61440]: DEBUG nova.compute.claims [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1940.869142] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1940.869353] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1940.882497] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1940.997529] env[61440]: DEBUG oslo_vmware.rw_handles [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1941.056544] env[61440]: DEBUG oslo_vmware.rw_handles [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1941.056731] env[61440]: DEBUG oslo_vmware.rw_handles [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1941.123611] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108b8523-c744-4c6f-8d90-dfd3fece08f1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1941.131134] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04e5051-6406-499d-95d7-35e46baa11ae {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1941.160681] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60465b6-31bb-42b6-86ce-a37cb0cebe7c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1941.167839] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f701246e-4834-43f2-b025-b7505081dfea {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1941.181018] env[61440]: DEBUG nova.compute.provider_tree [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1941.191214] env[61440]: DEBUG nova.scheduler.client.report [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1941.210530] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1941.211108] env[61440]: ERROR nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1941.211108] env[61440]: Faults: ['InvalidArgument']
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Traceback (most recent call last):
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self.driver.spawn(context, instance, image_meta,
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self._fetch_image_if_missing(context, vi)
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] image_cache(vi, tmp_image_ds_loc)
[ 1941.211108] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] vm_util.copy_virtual_disk(
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] session._wait_for_task(vmdk_copy_task)
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] return self.wait_for_task(task_ref)
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] return evt.wait()
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] result = hub.switch()
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] return self.greenlet.switch()
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1941.211386] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] self.f(*self.args, **self.kw)
[ 1941.211650] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1941.211650] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] raise exceptions.translate_fault(task_info.error)
[ 1941.211650] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1941.211650] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Faults: ['InvalidArgument']
[ 1941.211650] env[61440]: ERROR nova.compute.manager [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9]
[ 1941.211898] env[61440]: DEBUG nova.compute.utils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1941.213498] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Build of instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 was re-scheduled: A specified parameter was not correct: fileType
[ 1941.213498] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1941.213886] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1941.214077] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1941.214260] env[61440]: DEBUG nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1941.214428] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1941.589347] env[61440]: DEBUG nova.network.neutron [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1941.603419] env[61440]: INFO nova.compute.manager [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Took 0.39 seconds to deallocate network for instance.
[ 1941.710174] env[61440]: INFO nova.scheduler.client.report [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Deleted allocations for instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9
[ 1941.732591] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a620f9af-844f-4221-a7b7-61ed5e04555e tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 683.205s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1941.734412] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 487.395s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1941.734685] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Acquiring lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1941.734938] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1941.735275] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1941.737298] env[61440]: INFO nova.compute.manager [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Terminating instance
[ 1941.739145] env[61440]: DEBUG nova.compute.manager [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1941.739373] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1941.740082] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c7f2388-b71f-46b7-82d4-2876eb702ef2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1941.751306] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e8334d-940b-4aac-a03b-111dd7da4861 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1941.763226] env[61440]: DEBUG nova.compute.manager [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1941.787045] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ccdd9481-6f4b-4a84-9f05-a4709b6615d9 could not be found.
[ 1941.787045] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1941.787045] env[61440]: INFO nova.compute.manager [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Took 0.05 seconds to destroy the instance on the hypervisor.
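Note: the Acquiring / acquired (waited Ns) / "released" (held Ns) triples that bracket nearly every operation above come from oslo.concurrency's named-lock wrapper; the 683.205s hold on the build lock versus the 487.395s wait by the terminate request is that same mechanism serialising two code paths on one instance UUID, which is also why the eventual destroy finds the VM already gone (InstanceNotFound) and finishes in 0.05s. A toy version of the pattern (the function name is illustrative, not Nova's):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('ccdd9481-6f4b-4a84-9f05-a4709b6615d9')
    def do_terminate_instance():
        # Body runs only while holding the named lock; with debug logging
        # enabled, lockutils emits the Acquiring/acquired/released lines
        # with waited/held durations exactly as seen above.
        return 'terminated'

    print(do_terminate_instance())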
[ 1941.787316] env[61440]: DEBUG oslo.service.loopingcall [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1941.787541] env[61440]: DEBUG nova.compute.manager [-] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1941.787640] env[61440]: DEBUG nova.network.neutron [-] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1941.819555] env[61440]: DEBUG nova.network.neutron [-] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1941.827226] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1941.827491] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1941.829111] env[61440]: INFO nova.compute.claims [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1941.832378] env[61440]: INFO nova.compute.manager [-] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] Took 0.04 seconds to deallocate network for instance.
[ 1941.961651] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ba1f93f0-a8f8-467b-b2b0-edff1be11fc3 tempest-ServerActionsTestJSON-1139651446 tempest-ServerActionsTestJSON-1139651446-project-member] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.227s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1941.962588] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 167.918s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1941.962781] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ccdd9481-6f4b-4a84-9f05-a4709b6615d9] During sync_power_state the instance has a pending task (deleting). Skip.
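Note: the "pending task (deleting). Skip." line shows the guard in Nova's periodic power-state sync: an instance that still has a task_state in flight is left alone so the sync cannot race the delete that just held the instance lock. A condensed, hypothetical rendering of that guard (not Nova's actual code):

    from types import SimpleNamespace

    def query_driver_power_state_and_sync(instance):
        # Never reconcile power state while another task owns the instance;
        # this is the condition behind the "Skip." INFO line above.
        if instance.task_state is not None:
            print("During sync_power_state the instance has a pending task "
                  f"({instance.task_state}). Skip.")
            return
        print("would compare the driver's power state with the DB here")

    query_driver_power_state_and_sync(
        SimpleNamespace(uuid='ccdd9481-6f4b-4a84-9f05-a4709b6615d9',
                        task_state='deleting'))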
[ 1941.962996] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "ccdd9481-6f4b-4a84-9f05-a4709b6615d9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1942.077409] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb421ec-d898-42c1-9cbf-4219281b69f6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1942.085380] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f7365e-e2fc-41d4-896f-cdd8a7b03379 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1942.116872] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33cc96b-9b0a-42f7-ab5a-6bf43051d384 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1942.124264] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf20bd86-8e97-4090-a62f-f30a95ba4c04 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1942.137521] env[61440]: DEBUG nova.compute.provider_tree [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1942.150349] env[61440]: DEBUG nova.scheduler.client.report [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1942.170637] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1942.171356] env[61440]: DEBUG nova.compute.manager [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1942.209217] env[61440]: DEBUG nova.compute.utils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1942.210584] env[61440]: DEBUG nova.compute.manager [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1942.210759] env[61440]: DEBUG nova.network.neutron [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1942.221843] env[61440]: DEBUG nova.compute.manager [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1942.288481] env[61440]: DEBUG nova.compute.manager [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1942.311410] env[61440]: DEBUG nova.policy [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f35f4b3a2d094a93b9f8f65d766ebd4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4a61e96a8d34f76be5f32a3f9dff73b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1942.315850] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1942.316150] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1942.316327] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1942.316592] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1942.316758] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1942.316912] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
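Note: the nova.virt.hardware records here and just below show the degenerate case of Nova's CPU-topology search: with flavor and image limits/preferences all unset (logged as 0:0:0), every factorisation of the vCPU count into sockets x cores x threads under the 65536 caps is a candidate, and for the 1-vCPU m1.nano flavor that leaves exactly VirtCPUTopology(cores=1,sockets=1,threads=1). A toy enumeration of the same idea (illustrative only, not Nova's implementation):

    # With no constraints, candidate topologies are just the factorisations
    # of the vCPU count within the per-dimension caps.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log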
[ 1942.317211] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1942.317306] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1942.317474] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1942.317637] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1942.317848] env[61440]: DEBUG nova.virt.hardware [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1942.318732] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bb27f6-b8bc-4b5c-af3d-7f117c180545 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1942.327186] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a65988-1220-4167-9cf5-1b416d85484b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1942.956488] env[61440]: DEBUG nova.network.neutron [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Successfully created port: 9342b0a0-4182-4b15-b4ac-ec1523918138 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1944.254838] env[61440]: DEBUG nova.compute.manager [req-b32d27a2-e598-4eb2-953f-48947a521dcb req-4145558a-d6dc-4f3d-84cd-22983e5bb5a2 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Received event network-vif-plugged-9342b0a0-4182-4b15-b4ac-ec1523918138 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1944.255107] env[61440]: DEBUG oslo_concurrency.lockutils [req-b32d27a2-e598-4eb2-953f-48947a521dcb req-4145558a-d6dc-4f3d-84cd-22983e5bb5a2 service nova] Acquiring lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1944.255283] env[61440]: DEBUG oslo_concurrency.lockutils [req-b32d27a2-e598-4eb2-953f-48947a521dcb req-4145558a-d6dc-4f3d-84cd-22983e5bb5a2 service nova] Lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1944.255452] env[61440]: DEBUG oslo_concurrency.lockutils [req-b32d27a2-e598-4eb2-953f-48947a521dcb req-4145558a-d6dc-4f3d-84cd-22983e5bb5a2 service nova] Lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1944.255619] env[61440]: DEBUG nova.compute.manager [req-b32d27a2-e598-4eb2-953f-48947a521dcb req-4145558a-d6dc-4f3d-84cd-22983e5bb5a2 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] No waiting events found dispatching network-vif-plugged-9342b0a0-4182-4b15-b4ac-ec1523918138 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1944.255826] env[61440]: WARNING nova.compute.manager [req-b32d27a2-e598-4eb2-953f-48947a521dcb req-4145558a-d6dc-4f3d-84cd-22983e5bb5a2 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Received unexpected event network-vif-plugged-9342b0a0-4182-4b15-b4ac-ec1523918138 for instance with vm_state building and task_state spawning.
[ 1944.424752] env[61440]: DEBUG nova.network.neutron [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Successfully updated port: 9342b0a0-4182-4b15-b4ac-ec1523918138 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1944.437242] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "refresh_cache-976050d9-fd71-48db-9fb5-1b244f2ae4c4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1944.437673] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired lock "refresh_cache-976050d9-fd71-48db-9fb5-1b244f2ae4c4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1944.437673] env[61440]: DEBUG nova.network.neutron [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1944.511439] env[61440]: DEBUG nova.network.neutron [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1945.023477] env[61440]: DEBUG nova.network.neutron [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Updating instance_info_cache with network_info: [{"id": "9342b0a0-4182-4b15-b4ac-ec1523918138", "address": "fa:16:3e:c9:19:00", "network": {"id": "1224c4d7-dd39-47ab-8325-0c20b78083bc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1689851426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4a61e96a8d34f76be5f32a3f9dff73b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9342b0a0-41", "ovs_interfaceid": "9342b0a0-4182-4b15-b4ac-ec1523918138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1945.038727] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Releasing lock "refresh_cache-976050d9-fd71-48db-9fb5-1b244f2ae4c4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1945.038874] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:19:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c3e2368-4a35-4aa5-9135-23daedbbf9ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9342b0a0-4182-4b15-b4ac-ec1523918138', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1945.046421] env[61440]: DEBUG oslo.service.loopingcall [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.046866] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1945.047244] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c170a161-27ab-4cbe-99c9-3b7124694059 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.069406] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1945.069406] env[61440]: value = "task-4281399" [ 1945.069406] env[61440]: _type = "Task" [ 1945.069406] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.076507] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281399, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.252264] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.577371] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281399, 'name': CreateVM_Task, 'duration_secs': 0.303165} completed successfully. 
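
The "Instance VIF info" record above is the Neutron network_info entry reduced to the handful of fields the VMware driver needs: bridge name, MAC, NSX logical-switch reference, port id, and NIC model. A rough sketch of that translation; the function name and the OpaqueNetwork/vmxnet3 defaults are assumptions read off the log, not Nova's code:

def vif_info_from_network_info(network_info):
    # Keep only what the build_virtual_machine step logs as VIF info.
    return [{
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {"type": "OpaqueNetwork",
                        "network-id": vif["details"]["nsx-logical-switch-id"],
                        "network-type": "nsx.LogicalSwitch",
                        "use-external-id": True},
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    } for vif in network_info]

print(vif_info_from_network_info([{
    "id": "9342b0a0-4182-4b15-b4ac-ec1523918138",
    "address": "fa:16:3e:c9:19:00",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef"},
}]))
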
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.577726] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1945.578285] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.578453] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.578771] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1945.579047] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ac1292-f151-4bf6-a25a-6f9e0c4594de {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.583477] env[61440]: DEBUG oslo_vmware.api [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for the task: (returnval){ [ 1945.583477] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d813e4-9754-0df5-f2ba-3a5aeab3553d" [ 1945.583477] env[61440]: _type = "Task" [ 1945.583477] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.590781] env[61440]: DEBUG oslo_vmware.api [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52d813e4-9754-0df5-f2ba-3a5aeab3553d, 'name': SearchDatastore_Task} progress is 0%. 
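
Both CreateVM_Task and SearchDatastore_Task above follow the same pattern: submit the vSphere task, then poll its state until it succeeds or errors, logging progress along the way. A simplified polling loop in the spirit of oslo_vmware.api's task waiter; the poll callable and its dict shape are stand-ins, not the real API:

import time

def wait_for_task(poll, interval=0.5, timeout=300.0):
    # Poll until success/error or the deadline, logging progress each pass.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"task did not complete in {timeout}s")

states = iter([{"state": "running", "progress": 0},
               {"state": "success", "result": "vm-123"}])
print(wait_for_task(lambda: next(states), interval=0.01))  # -> vm-123
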
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.094219] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.094493] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1946.094727] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.281689] env[61440]: DEBUG nova.compute.manager [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Received event network-changed-9342b0a0-4182-4b15-b4ac-ec1523918138 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1946.281903] env[61440]: DEBUG nova.compute.manager [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Refreshing instance network info cache due to event network-changed-9342b0a0-4182-4b15-b4ac-ec1523918138. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1946.282132] env[61440]: DEBUG oslo_concurrency.lockutils [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] Acquiring lock "refresh_cache-976050d9-fd71-48db-9fb5-1b244f2ae4c4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.282277] env[61440]: DEBUG oslo_concurrency.lockutils [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] Acquired lock "refresh_cache-976050d9-fd71-48db-9fb5-1b244f2ae4c4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.282453] env[61440]: DEBUG nova.network.neutron [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Refreshing network info cache for port 9342b0a0-4182-4b15-b4ac-ec1523918138 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1946.621823] env[61440]: DEBUG nova.network.neutron [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Updated VIF entry in instance network info cache for port 9342b0a0-4182-4b15-b4ac-ec1523918138. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1946.622207] env[61440]: DEBUG nova.network.neutron [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Updating instance_info_cache with network_info: [{"id": "9342b0a0-4182-4b15-b4ac-ec1523918138", "address": "fa:16:3e:c9:19:00", "network": {"id": "1224c4d7-dd39-47ab-8325-0c20b78083bc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1689851426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4a61e96a8d34f76be5f32a3f9dff73b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9342b0a0-41", "ovs_interfaceid": "9342b0a0-4182-4b15-b4ac-ec1523918138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.631281] env[61440]: DEBUG oslo_concurrency.lockutils [req-3fc9bedb-c14b-4b35-b78d-2e266be81d96 req-1c319715-a708-41ed-8ef2-c31c2612acb5 service nova] Releasing lock "refresh_cache-976050d9-fd71-48db-9fb5-1b244f2ae4c4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.275133] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1950.275035] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1950.275299] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1950.275530] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1950.297719] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.297938] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298034] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298157] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298276] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298395] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298527] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298650] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298759] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.298904] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1950.299037] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
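
The _heal_instance_info_cache pass above walks every instance on the host, skips those still in the Building state, and would refresh the network info cache for the rest; here all of them are building, so nothing is healed. A toy version of that selection logic, where the Instance dataclass is a stand-in for Nova's instance object:

from dataclasses import dataclass

@dataclass
class Instance:          # stand-in for Nova's instance object
    uuid: str
    vm_state: str        # e.g. "building", "active"

def instances_to_heal(instances):
    # Skip building instances, collect the rest for a cache refresh.
    to_heal = []
    for inst in instances:
        if inst.vm_state == "building":
            print(f"[instance: {inst.uuid}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal

instances_to_heal([Instance("976050d9-fd71-48db-9fb5-1b244f2ae4c4", "building")])
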
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1951.274655] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.274834] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1954.274613] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.274998] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1956.274626] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.270598] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.274307] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.274437] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.285159] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.285360] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.285531] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.285689] env[61440]: DEBUG nova.compute.resource_tracker [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1958.286904] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5ca20a-4a2f-483c-a895-ef493dabd016 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.296030] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016a4cee-6c8c-422d-bdca-e87fb0f9da4d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.309065] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f87c8a-8557-4ae3-b6f7-c2dc0fe87895 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.314896] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a557c4-10f5-49f8-9951-ec226d3db96c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.343872] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180659MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1958.344029] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.344214] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.413806] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.413968] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.414104] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.414225] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.414345] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.414462] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.414577] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.414691] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.415083] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.415083] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1958.425337] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
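
Each "actively managed" record above contributes one placement allocation of {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, and the resource tracker sums these into the "Final resource view" reported just below. A sketch of that arithmetic, assuming (as the 1792MB used_ram figure suggests) that the 512MB of reserved host memory is counted as used:

def final_resource_view(phys_ram_mb, phys_disk_gb, total_vcpus,
                        allocations, reserved_ram_mb=512):
    # Sum per-instance allocations into host-level usage totals
    # (simplified; field names follow the log line below).
    used = {"MEMORY_MB": reserved_ram_mb, "DISK_GB": 0, "VCPU": 0}
    for alloc in allocations:                  # one dict per instance
        for rc, amount in alloc["resources"].items():
            used[rc] += amount
    return {"phys_ram": f"{phys_ram_mb}MB", "used_ram": f"{used['MEMORY_MB']}MB",
            "phys_disk": f"{phys_disk_gb}GB", "used_disk": f"{used['DISK_GB']}GB",
            "total_vcpus": total_vcpus, "used_vcpus": used["VCPU"]}

allocations = [{"resources": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}}] * 10
print(final_resource_view(196590, 183, 48, allocations))
# used_ram=1792MB (10 x 128 + 512 reserved), used_disk=10GB, used_vcpus=10
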
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1958.425549] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1958.425693] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1958.554820] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f8d31b-92f0-470f-b737-f909cddb35e9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.562291] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6f41a2-3a18-44cf-a17d-764c4d6799b8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.591236] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1f54eb-8fc4-4647-9d08-f6fec73bb5f1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.598416] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27afe4fa-3478-41ef-ad78-84b7cf9fd2a6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.612528] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1958.622683] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1958.639769] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1958.639966] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.296s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.636143] env[61440]: DEBUG oslo_service.periodic_task [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.058710] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "abbd5d5b-7821-435d-ac56-0d070ff08043" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.601216] env[61440]: WARNING oslo_vmware.rw_handles [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1990.601216] env[61440]: ERROR oslo_vmware.rw_handles [ 1990.601940] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1990.603575] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1990.603818] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Copying Virtual Disk [datastore2] vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] 
vmware_temp/d6144c11-052f-4fc1-a6e4-ee37840e1e58/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1990.604135] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa25b18e-50a5-437b-832a-7826d6c8fac9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.611969] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1990.611969] env[61440]: value = "task-4281400" [ 1990.611969] env[61440]: _type = "Task" [ 1990.611969] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.619693] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': task-4281400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.122969] env[61440]: DEBUG oslo_vmware.exceptions [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1991.123295] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.123850] env[61440]: ERROR nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1991.123850] env[61440]: Faults: ['InvalidArgument'] [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Traceback (most recent call last): [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] yield resources [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self.driver.spawn(context, instance, image_meta, [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1991.123850] 
env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self._fetch_image_if_missing(context, vi) [ 1991.123850] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] image_cache(vi, tmp_image_ds_loc) [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] vm_util.copy_virtual_disk( [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] session._wait_for_task(vmdk_copy_task) [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] return self.wait_for_task(task_ref) [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] return evt.wait() [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] result = hub.switch() [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1991.124185] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] return self.greenlet.switch() [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self.f(*self.args, **self.kw) [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] raise exceptions.translate_fault(task_info.error) [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: 
ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Faults: ['InvalidArgument'] [ 1991.124693] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] [ 1991.124693] env[61440]: INFO nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Terminating instance [ 1991.125810] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.126041] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1991.126308] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7390f2e-c6c8-44e7-818c-586b47c5219a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.128820] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Start destroying the instance on the hypervisor. 
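
"Fault InvalidArgument not matched" above means oslo.vmware found no specific exception class registered for that fault name and fell back to the generic VimFaultException carrying the fault list. A simplified version of that lookup; the registry is left empty here by way of example, whereas the real library maps many fault names:

class VimFaultException(Exception):
    # Generic vSphere fault carrier (stand-in for oslo_vmware's class).
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

FAULT_CLASS_MAP = {}   # fault name -> specific exception class

def translate_fault(fault_name, message):
    # Fall back to the generic exception when the fault is not matched.
    cls = FAULT_CLASS_MAP.get(fault_name)
    if cls is None:
        print(f"Fault {fault_name} not matched.")
        cls = VimFaultException
    return cls([fault_name], message)

exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(exc, exc.fault_list)   # -> ... ['InvalidArgument']
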
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1991.129018] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1991.129763] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ba59ea-eba9-45b3-81ad-f8dcccc1f08b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.136876] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1991.137104] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4137637e-7fa9-4eb9-bb2d-99c8c44b83e8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.139237] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1991.139415] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1991.140445] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8357b95a-744d-487b-9f2e-1233ab467ebc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.145915] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 1991.145915] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52de40ac-0642-244f-d1f9-bbb4da7a61f0" [ 1991.145915] env[61440]: _type = "Task" [ 1991.145915] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.152897] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52de40ac-0642-244f-d1f9-bbb4da7a61f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.211812] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1991.212070] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1991.212268] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Deleting the datastore file [datastore2] ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1991.212544] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7dcae89-d65a-4af6-8820-60e34527a48d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.219173] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for the task: (returnval){ [ 1991.219173] env[61440]: value = "task-4281402" [ 1991.219173] env[61440]: _type = "Task" [ 1991.219173] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.226643] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': task-4281402, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.656278] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1991.656599] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating directory with path [datastore2] vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1991.656733] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5190615a-aeab-4e49-a675-ba632210bab2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.669059] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Created directory with path [datastore2] vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1991.669253] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Fetch image to [datastore2] vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1991.669424] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1991.670161] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a582d074-2d73-4c2d-b1e8-46a3f07e60be {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.676249] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cd514c-2ac9-44c4-8f5b-b98801dd219f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.684895] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61d5100-a59e-45da-9ad4-7a32c769c18c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.714255] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6be5ce13-707a-4599-a377-a68ca05002bd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.719656] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aa45342f-1e54-42fe-b10d-a8fcad803ad9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.728347] env[61440]: DEBUG oslo_vmware.api [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Task: {'id': task-4281402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081536} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.728595] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1991.728781] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1991.728985] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1991.729203] env[61440]: INFO nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Took 0.60 seconds to destroy the instance on the hypervisor. 
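
The destroy sequence above is strictly ordered: unregister the VM from vCenter first, then delete its directory from the datastore, so a failed file deletion can never leave a registered VM pointing at missing files. A stand-in sketch of that ordering with injectable operations instead of real vSphere calls:

def destroy_instance(unregister_vm, delete_datastore_file, vm_uuid, ds_path):
    # Unregister first; treat file-cleanup failures as non-fatal (sketch).
    unregister_vm(vm_uuid)
    print(f"[instance: {vm_uuid}] Unregistered the VM")
    try:
        delete_datastore_file(ds_path)
        print(f"Deleted the datastore file {ds_path}")
    except Exception as err:
        print(f"Failed to delete {ds_path}: {err}")

destroy_instance(lambda vm: None, lambda path: None,
                 "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0",
                 "[datastore2] ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0")
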
[ 1991.731309] env[61440]: DEBUG nova.compute.claims [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1991.731482] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.731693] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.743176] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1991.799542] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1991.863927] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1991.864144] env[61440]: DEBUG oslo_vmware.rw_handles [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
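
The rw_handles records above stream 21318656 bytes from the image iterator into an HTTP write connection against the ESX host's /folder endpoint, then close the write handle; closing is where the server's response is read, which is also where the earlier RemoteDisconnected warning for the ecd1b8a1 transfer surfaced. A simplified chunked copy, with a fake handle standing in for the HTTP connection:

def stream_to_handle(chunks, write_handle, expected_size):
    # Copy chunk by chunk; the server only answers once the handle closes.
    written = 0
    for chunk in chunks:
        write_handle.write(chunk)
        written += len(chunk)
    write_handle.close()   # response (or RemoteDisconnected) surfaces here
    if written != expected_size:
        raise IOError(f"short write: {written} of {expected_size} bytes")
    return written

class FakeHandle:          # stand-in for the HTTP write connection
    def __init__(self):
        self.data = bytearray()
    def write(self, chunk):
        self.data.extend(chunk)
    def close(self):
        pass

print(stream_to_handle([b"\0" * 1024] * 4, FakeHandle(), 4096))  # -> 4096
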
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1991.966750] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765845fd-0831-4e76-b53a-7a067f275d5e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.974114] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594cfad5-8cc5-4d63-9deb-9fb8bbdd2186 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.004322] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7d5190-9e2b-4617-a96c-4d677a688910 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.011040] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e102b8-b7d4-4efa-b5f2-d5b5e6112f6f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.023269] env[61440]: DEBUG nova.compute.provider_tree [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1992.032596] env[61440]: DEBUG nova.scheduler.client.report [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1992.045712] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.046239] env[61440]: ERROR nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1992.046239] env[61440]: Faults: ['InvalidArgument'] [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Traceback (most recent call last): [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self.driver.spawn(context, instance, image_meta, [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self._fetch_image_if_missing(context, vi) [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] image_cache(vi, tmp_image_ds_loc) [ 1992.046239] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] vm_util.copy_virtual_disk( [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] session._wait_for_task(vmdk_copy_task) [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] return self.wait_for_task(task_ref) [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] return evt.wait() [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] result = hub.switch() [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] return self.greenlet.switch() [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1992.046605] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] self.f(*self.args, **self.kw) [ 1992.046957] env[61440]: ERROR nova.compute.manager [instance: 
ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1992.046957] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] raise exceptions.translate_fault(task_info.error) [ 1992.046957] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1992.046957] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Faults: ['InvalidArgument'] [ 1992.046957] env[61440]: ERROR nova.compute.manager [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] [ 1992.046957] env[61440]: DEBUG nova.compute.utils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1992.048608] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Build of instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 was re-scheduled: A specified parameter was not correct: fileType [ 1992.048608] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1992.049008] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1992.049190] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1992.049363] env[61440]: DEBUG nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1992.049529] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1992.445388] env[61440]: DEBUG nova.network.neutron [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.461027] env[61440]: INFO nova.compute.manager [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Took 0.41 seconds to deallocate network for instance. [ 1992.560381] env[61440]: INFO nova.scheduler.client.report [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Deleted allocations for instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 [ 1992.584126] env[61440]: DEBUG oslo_concurrency.lockutils [None req-b4fce07d-2148-4ff1-bdfe-db8cd5c2e1b2 tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 646.755s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.585124] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 450.481s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.585396] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Acquiring lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.585635] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.585836] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.587825] env[61440]: INFO nova.compute.manager [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Terminating instance [ 1992.589523] env[61440]: DEBUG nova.compute.manager [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1992.589769] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1992.590344] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91401c26-a89b-4d66-92d7-7de43825c870 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.599330] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4d5291-6ee9-4399-b542-9e2ab10ed207 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.609728] env[61440]: DEBUG nova.compute.manager [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1992.629735] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0 could not be found.
[ 1992.629940] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1992.630129] env[61440]: INFO nova.compute.manager [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1992.630367] env[61440]: DEBUG oslo.service.loopingcall [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1992.630671] env[61440]: DEBUG nova.compute.manager [-] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1992.630773] env[61440]: DEBUG nova.network.neutron [-] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1992.656648] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.656903] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.658315] env[61440]: INFO nova.compute.claims [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1992.664515] env[61440]: DEBUG nova.network.neutron [-] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.671262] env[61440]: INFO nova.compute.manager [-] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] Took 0.04 seconds to deallocate network for instance.
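The "Acquiring lock … by …" / "acquired … :: waited" / ""released" … :: held" triples throughout this log are emitted by the wrapper that oslo.concurrency's synchronized decorator installs around the target function (the inner frames at lockutils.py:402/407/421 above), and the .<locals>. segments in the qualnames appear because Nova frequently locks nested helper functions. A minimal sketch of that pattern, assuming the plain lockutils decorator rather than Nova's own wrappers; the lock names are illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim():
        # Body runs only while the in-process "compute_resources" lock is
        # held; the decorator's wrapper logs the acquire/wait/hold times
        # seen in the lockutils DEBUG lines above.
        pass

    def build_and_run_instance():
        # A nested, decorated helper like this is what produces qualnames
        # such as ComputeManager.build_and_run_instance.<locals>.
        # _locked_do_build_and_run_instance in the lock messages.
        @lockutils.synchronized("ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0")
        def _locked_do_build_and_run_instance():
            pass
        _locked_do_build_and_run_instance()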
[ 1992.797671] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f5f38c25-a6cf-46f8-9b6d-a11197c6f4cf tempest-AttachVolumeShelveTestJSON-1708115900 tempest-AttachVolumeShelveTestJSON-1708115900-project-member] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.212s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.798518] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 218.754s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.798796] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1992.799090] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "ecd1b8a1-7104-44fa-bd60-6521ef9e1fd0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.880036] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dab3623-56f5-47c6-bb93-c7a7e3e065c0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.888316] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e56e5e-f0d9-4ed8-a57b-28d93efbb455 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.919252] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0f568c-012e-4824-88f9-aa2a3e2892c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.926735] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34303f5-582b-4cae-aa1b-fa6317e7c685 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.939689] env[61440]: DEBUG nova.compute.provider_tree [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1992.948422] env[61440]: DEBUG nova.scheduler.client.report [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0,
'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1992.962385] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.962820] env[61440]: DEBUG nova.compute.manager [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1993.017538] env[61440]: DEBUG nova.compute.utils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1993.018877] env[61440]: DEBUG nova.compute.manager [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Not allocating networking since 'none' was specified. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1993.028341] env[61440]: DEBUG nova.compute.manager [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1993.086812] env[61440]: DEBUG nova.compute.manager [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1993.112206] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=<?>,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T01:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1993.112468] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1993.112629] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1993.112812] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1993.112959] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1993.113122] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1993.113329] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1993.113487] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1993.113653] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790
tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1993.113815] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1993.113988] env[61440]: DEBUG nova.virt.hardware [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1993.114841] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643a7c07-1fc5-4831-9ffb-5f977e2e1eb7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.122511] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccc26bc-66b8-4b43-926c-846d7ead97ff {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.135392] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Instance VIF info [] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1993.140894] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Creating folder: Project (2b791a1ab8274adfbbad1bf2305da8b8). Parent ref: group-v843372. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1993.141168] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45e5a69a-41c2-4c6f-a5cd-115df33ef028 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.150766] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Created folder: Project (2b791a1ab8274adfbbad1bf2305da8b8) in parent group-v843372. [ 1993.150945] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Creating folder: Instances. Parent ref: group-v843479. {{(pid=61440) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1993.151166] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85d844ca-b36e-4813-a882-b7bf0dad128e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.159490] env[61440]: INFO nova.virt.vmwareapi.vm_util [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Created folder: Instances in parent group-v843479. 
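For orientation, the two "Creating folder" entries above correspond to Folder.CreateFolder calls issued through an oslo.vmware session. A minimal sketch of that call pattern, assuming hostname, credentials, and retry/poll values that are purely illustrative; only the group-v843372 moref and the folder names come from the log:

    from oslo_vmware import api, vim_util

    # Assumed connection parameters (host, user, password, api_retry_count,
    # task_poll_interval); not this deployment's configuration.
    session = api.VMwareAPISession('vc.example.test', 'svc-user', 'secret',
                                   10, 0.5)

    # session.vim exposes the vSphere SDK; CreateFolder is invoked on a
    # parent Folder managed-object reference.
    parent = vim_util.get_moref('group-v843372', 'Folder')
    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent,
        name='Project (2b791a1ab8274adfbbad1bf2305da8b8)')
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')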
[ 1993.159714] env[61440]: DEBUG oslo.service.loopingcall [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.159893] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1993.160096] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3705cb2f-01de-4645-a9cd-24516fe5740d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.175936] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1993.175936] env[61440]: value = "task-4281405" [ 1993.175936] env[61440]: _type = "Task" [ 1993.175936] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.182706] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281405, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.686272] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281405, 'name': CreateVM_Task, 'duration_secs': 0.237623} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.686583] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1993.686994] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1993.687232] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1993.687592] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1993.687891] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63c26811-7cfd-4d23-8817-db08b84f1abb {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.692795] env[61440]: DEBUG oslo_vmware.api [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Waiting for the task: 
(returnval){ [ 1993.692795] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5271e07e-0ecd-2198-b15a-68775a0b8ccb" [ 1993.692795] env[61440]: _type = "Task" [ 1993.692795] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.700818] env[61440]: DEBUG oslo_vmware.api [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5271e07e-0ecd-2198-b15a-68775a0b8ccb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.203139] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.203139] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1994.203139] env[61440]: DEBUG oslo_concurrency.lockutils [None req-60c3f5b5-ba3b-42f4-ad7f-444eefb85790 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1996.930309] env[61440]: DEBUG oslo_concurrency.lockutils [None req-16c5f7f1-6271-4d88-bab5-a4828c322f77 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.679848] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "04558d4f-12c4-461a-93f9-64a32618f3d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.680152] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "04558d4f-12c4-461a-93f9-64a32618f3d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.274471] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61440)
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2008.274677] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances with incomplete migration {{(pid=61440) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2010.282743] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2011.275043] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2011.275300] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2011.275443] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2011.297743] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298077] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298077] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298189] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298264] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298430] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298560] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298681] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298799] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.298952] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2011.299086] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2013.274753] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.275161] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2015.274432] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.274309] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.275666] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.275506] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.270711] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.274375] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.285760] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.286034] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.286208] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.286392] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2020.287525] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214bcff2-18d0-4478-ab9f-87527c0a74dc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.296785] env[61440]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a03d248-c4a1-4db1-8a8f-815b7eb61dbd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.310909] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4c2661-90cc-4c7f-b2b7-ce5e2b413d79 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.317204] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed534a6-734b-49b1-99b4-099d64d652ba {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.344912] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180650MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2020.345071] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.345264] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.480661] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance c307f560-e474-441f-b099-53c2fd290488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.480831] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.480959] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481108] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481233] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481360] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481472] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481587] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481701] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.481811] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2020.492892] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2020.493127] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2020.493275] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2020.508620] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing inventories for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2020.523751] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating ProviderTree inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2020.523929] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2020.534202] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing aggregate associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, aggregates: None {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2020.550490] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing trait associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2020.669410] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa75d8d8-1d85-43c4-bb98-3026f79c5ec2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.677057] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-30bcc0f3-3945-453a-8dc3-d11f5208f827 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.705938] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9e55fe-74e3-40e6-95f6-5b76bfa5942f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.712771] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec44ae9-7177-4131-85e8-fbe2b87ae58a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.725304] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.734787] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2020.749458] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2020.749635] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.404s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.275251] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.283622] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.283922] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2031.294127] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] There are 0 instances to clean {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2039.772834] env[61440]: WARNING oslo_vmware.rw_handles [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 
tempest-AttachVolumeNegativeTest-158246958-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2039.772834] env[61440]: ERROR oslo_vmware.rw_handles [ 2039.773517] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2039.775448] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2039.775709] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Copying Virtual Disk [datastore2] vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/08ef0b40-02e0-4525-9b82-5961dcdcf440/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2039.775996] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b4d3d4d-31a7-44db-ad47-ecdbbd0a052a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.784377] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 2039.784377] env[61440]: value = "task-4281406" [ 2039.784377] env[61440]: _type = "Task" [ 2039.784377] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.791494] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': task-4281406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.294465] env[61440]: DEBUG oslo_vmware.exceptions [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2040.294683] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.295233] env[61440]: ERROR nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2040.295233] env[61440]: Faults: ['InvalidArgument'] [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] Traceback (most recent call last): [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] yield resources [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self.driver.spawn(context, instance, image_meta, [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self._fetch_image_if_missing(context, vi) [ 2040.295233] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] image_cache(vi, tmp_image_ds_loc) [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: 
c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] vm_util.copy_virtual_disk( [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] session._wait_for_task(vmdk_copy_task) [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] return self.wait_for_task(task_ref) [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] return evt.wait() [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] result = hub.switch() [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2040.295592] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] return self.greenlet.switch() [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self.f(*self.args, **self.kw) [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] raise exceptions.translate_fault(task_info.error) [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] Faults: ['InvalidArgument'] [ 2040.295897] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] [ 2040.295897] env[61440]: INFO nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Terminating instance [ 2040.297618] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.297618] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2040.297618] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea492346-3da6-4a29-9304-3a25925e30e3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.299896] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2040.300099] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2040.300794] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf447ca-055c-43c6-8c06-226bd2360681 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.307417] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2040.307674] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b32febe-757a-4c3b-a06d-f75a2dd405ed {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.309721] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2040.309971] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2040.310899] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f610a10-fee3-4bdf-b8ba-ae01bc164ed0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.315803] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for the task: (returnval){ [ 2040.315803] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5214e74a-2abd-127b-fbe9-c68d40be0fe7" [ 2040.315803] env[61440]: _type = "Task" [ 2040.315803] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.323082] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]5214e74a-2abd-127b-fbe9-c68d40be0fe7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.371982] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2040.372236] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2040.372407] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Deleting the datastore file [datastore2] c307f560-e474-441f-b099-53c2fd290488 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2040.372669] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9cd9817-361f-488f-aab4-7a8e9cabfa77 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.379236] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 2040.379236] env[61440]: value = "task-4281408" [ 2040.379236] env[61440]: _type = "Task" [ 2040.379236] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.386519] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': task-4281408, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.826520] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2040.826834] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating directory with path [datastore2] vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2040.826948] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-369b239e-630a-416d-99e5-82c9eab6972f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.837560] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Created directory with path [datastore2] vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2040.837750] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Fetch image to [datastore2] vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2040.837992] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2040.838635] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cc6851-9270-4b4f-8c96-6b7efac4dda4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.844617] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0599c4-9689-4d2c-a581-051a5af4e2b5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.853211] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335a1268-2565-4555-81f3-083ef698f1b4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.885775] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c560bb42-5643-4d04-af8f-3228fa7947c6 
{{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.892387] env[61440]: DEBUG oslo_vmware.api [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': task-4281408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078018} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.893717] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2040.893901] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2040.894088] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2040.894263] env[61440]: INFO nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Took 0.59 seconds to destroy the instance on the hypervisor. 
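The task waits above (SearchDatastore_Task, DeleteDatastoreFile_Task, and the failed CopyVirtualDisk_Task before them) all go through oslo.vmware's wait_for_task/_poll_task loop: poll the task state, return on success, translate the fault and raise on error (the api.py:448 frames in the traceback above show the InvalidArgument case taking that path). A minimal schematic of that control flow, with get_task_info as a hypothetical stand-in for the real SOAP property read; this is a sketch only, not oslo.vmware's actual implementation:

    import time

    class TaskFaulted(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task leaves its queued/running states, mirroring
        # the _poll_task progress records above ("progress is 0%").
        while True:
            info = get_task_info()  # hypothetical: {'state': ..., 'error': ...}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # oslo.vmware translates the vSphere fault at this point.
                raise TaskFaulted(info['error'])
            time.sleep(interval)

    # Toy stub: the "task" succeeds on the third poll.
    polls = iter([{'state': 'running'}, {'state': 'running'}, {'state': 'success'}])
    wait_for_task(lambda: next(polls), interval=0)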
[ 2040.895933] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7e83058b-b1a2-4e4b-ab45-43c5a27532c0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.897741] env[61440]: DEBUG nova.compute.claims [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2040.897905] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.898128] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.920554] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2040.990065] env[61440]: DEBUG oslo_vmware.rw_handles [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2041.049165] env[61440]: DEBUG oslo_vmware.rw_handles [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2041.049374] env[61440]: DEBUG oslo_vmware.rw_handles [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2041.131330] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605ff30b-016f-42b9-aa14-62d88b953079 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.138995] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d52a1e2-9ad4-45ed-9122-88feb035f232 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.167659] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b3cb24-ab0d-4c09-aef7-519fc1895766 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.174624] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a901330-67d1-4c20-a3d6-8bf638d92345 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.187409] env[61440]: DEBUG nova.compute.provider_tree [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2041.196416] env[61440]: DEBUG nova.scheduler.client.report [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2041.209931] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.210475] env[61440]: ERROR nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2041.210475] env[61440]: Faults: ['InvalidArgument'] [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] Traceback (most recent call last): [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2041.210475] 
env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self.driver.spawn(context, instance, image_meta, [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self._fetch_image_if_missing(context, vi) [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] image_cache(vi, tmp_image_ds_loc) [ 2041.210475] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] vm_util.copy_virtual_disk( [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] session._wait_for_task(vmdk_copy_task) [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] return self.wait_for_task(task_ref) [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] return evt.wait() [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] result = hub.switch() [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] return self.greenlet.switch() [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2041.210844] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] self.f(*self.args, **self.kw) [ 2041.211199] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2041.211199] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] raise exceptions.translate_fault(task_info.error) [ 2041.211199] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2041.211199] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] Faults: ['InvalidArgument'] [ 2041.211199] env[61440]: ERROR nova.compute.manager [instance: c307f560-e474-441f-b099-53c2fd290488] [ 2041.211199] env[61440]: DEBUG nova.compute.utils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2041.212582] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Build of instance c307f560-e474-441f-b099-53c2fd290488 was re-scheduled: A specified parameter was not correct: fileType [ 2041.212582] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2041.212948] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2041.213144] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2041.213316] env[61440]: DEBUG nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2041.213515] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2041.227185] env[61440]: DEBUG oslo_concurrency.lockutils [None req-57dc3f34-fe67-4694-94b4-fdda6d5b7f37 tempest-ServerShowV254Test-1897457272 tempest-ServerShowV254Test-1897457272-project-member] Acquiring lock "430f38e6-068a-4c50-b27a-24335bf7e3ec" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.535071] env[61440]: DEBUG nova.network.neutron [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.550474] env[61440]: INFO nova.compute.manager [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Took 0.34 seconds to deallocate network for instance.
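The inventory dicts repeated in the records above feed a simple capacity rule: for each resource class, placement treats (total - reserved) * allocation_ratio as the schedulable limit. A quick check in plain Python arithmetic (not placement's own code) with the exact figures from these records:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 329

This is why the earlier "Total usable vcpus: 48, total allocated vcpus: 10" record implies no CPU pressure: with the 4.0 allocation ratio the effective VCPU capacity is 192.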
[ 2041.648965] env[61440]: INFO nova.scheduler.client.report [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Deleted allocations for instance c307f560-e474-441f-b099-53c2fd290488 [ 2041.667016] env[61440]: DEBUG oslo_concurrency.lockutils [None req-f38c67d3-58e4-47a2-81f9-87d85cc56f6a tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "c307f560-e474-441f-b099-53c2fd290488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 685.654s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.669434] env[61440]: DEBUG oslo_concurrency.lockutils [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "c307f560-e474-441f-b099-53c2fd290488" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 489.765s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.669544] env[61440]: DEBUG oslo_concurrency.lockutils [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "c307f560-e474-441f-b099-53c2fd290488-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.669746] env[61440]: DEBUG oslo_concurrency.lockutils [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "c307f560-e474-441f-b099-53c2fd290488-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.669923] env[61440]: DEBUG oslo_concurrency.lockutils [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "c307f560-e474-441f-b099-53c2fd290488-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.671902] env[61440]: INFO nova.compute.manager [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Terminating instance [ 2041.674225] env[61440]: DEBUG nova.compute.manager [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Start destroying the instance on the hypervisor.
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2041.674428] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2041.674732] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9de902d4-b2ab-4995-82a4-eff1a7708a3b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.684992] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1800872-37f6-41d8-ad21-0575671b0711 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.699345] env[61440]: DEBUG nova.compute.manager [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Starting instance... {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2041.720887] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c307f560-e474-441f-b099-53c2fd290488 could not be found. [ 2041.721108] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2041.721294] env[61440]: INFO nova.compute.manager [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: c307f560-e474-441f-b099-53c2fd290488] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2041.721542] env[61440]: DEBUG oslo.service.loopingcall [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2041.721778] env[61440]: DEBUG nova.compute.manager [-] [instance: c307f560-e474-441f-b099-53c2fd290488] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2041.721882] env[61440]: DEBUG nova.network.neutron [-] [instance: c307f560-e474-441f-b099-53c2fd290488] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2041.747988] env[61440]: DEBUG nova.network.neutron [-] [instance: c307f560-e474-441f-b099-53c2fd290488] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.753615] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.753852] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.755349] env[61440]: INFO nova.compute.claims [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2041.758817] env[61440]: INFO nova.compute.manager [-] [instance: c307f560-e474-441f-b099-53c2fd290488] Took 0.04 seconds to deallocate network for instance. [ 2041.860795] env[61440]: DEBUG oslo_concurrency.lockutils [None req-498fefb2-b233-40d3-a2d0-9e27d8b1fa34 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "c307f560-e474-441f-b099-53c2fd290488" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.192s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.861969] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "c307f560-e474-441f-b099-53c2fd290488" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 267.817s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.862484] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: c307f560-e474-441f-b099-53c2fd290488] During sync_power_state the instance has a pending task (deleting). Skip.
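The Acquiring / acquired / released triplets above, with their waited/held timings, are emitted by oslo.concurrency's lock wrapper whenever code takes a named lock such as "compute_resources" or a per-instance UUID lock. A minimal sketch of guarding a critical section the same way, assuming oslo.concurrency's default in-process semaphore; the function body here is hypothetical:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Anything here runs with the named lock held; entry and exit
        # produce acquired/released pairs like the records above.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass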
[ 2041.863148] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "c307f560-e474-441f-b099-53c2fd290488" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.928690] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86077340-966a-49d8-9e62-fa0f1fee4065 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.936346] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e03000f-59a2-412b-94c5-6337585334f7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.965960] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4fb7ca-425a-4575-ae6b-16fe1ff4f12a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.972557] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef73074-09b7-44ff-9b13-460ddf269a2d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.985094] env[61440]: DEBUG nova.compute.provider_tree [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2041.994172] env[61440]: DEBUG nova.scheduler.client.report [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2042.006711] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.253s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.007170] env[61440]: DEBUG nova.compute.manager [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Start building networks asynchronously for instance.
{{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2042.047828] env[61440]: DEBUG nova.compute.utils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2042.049092] env[61440]: DEBUG nova.compute.manager [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2042.049263] env[61440]: DEBUG nova.network.neutron [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2042.059174] env[61440]: DEBUG nova.compute.manager [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2042.112245] env[61440]: DEBUG nova.policy [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8be059e7fab4a84b58f00f1490fdb41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4eab358aa42d42659e93d2ead48ed0a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 2042.126105] env[61440]: DEBUG nova.compute.manager [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2042.150890] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2042.151200] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2042.151396] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2042.151614] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2042.151790] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2042.151966] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2042.152236] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2042.152411] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2042.152583] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 
tempest-ServersTestJSON-678514262-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2042.152742] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2042.152909] env[61440]: DEBUG nova.virt.hardware [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2042.153753] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26acbca-924d-4631-ac48-b39c54627c64 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.161917] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8793a14-47ae-4fe8-baf2-1efdf70086fd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.437745] env[61440]: DEBUG nova.network.neutron [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Successfully created port: 47355a96-d316-4624-9612-58d52ddc35e4 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2043.183625] env[61440]: DEBUG nova.network.neutron [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Successfully updated port: 47355a96-d316-4624-9612-58d52ddc35e4 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2043.198850] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "refresh_cache-04558d4f-12c4-461a-93f9-64a32618f3d4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2043.199009] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "refresh_cache-04558d4f-12c4-461a-93f9-64a32618f3d4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2043.199172] env[61440]: DEBUG nova.network.neutron [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2043.238678] env[61440]: DEBUG nova.network.neutron [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Instance cache missing network info. 
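The topology records above ("Build topologies for 1 vcpu(s) 1:1:1" through "Got 1 possible topologies") enumerate (sockets, cores, threads) tuples whose product equals the vCPU count, bounded by the 65536 limits. A simplified sketch of that enumeration, assuming a straightforward divisor search rather than Nova's exact ordering and preference handling:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        out = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    out.append((sockets, cores, threads))
        return out

    print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology logged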
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2043.430322] env[61440]: DEBUG nova.network.neutron [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Updating instance_info_cache with network_info: [{"id": "47355a96-d316-4624-9612-58d52ddc35e4", "address": "fa:16:3e:86:78:e9", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47355a96-d3", "ovs_interfaceid": "47355a96-d316-4624-9612-58d52ddc35e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.443241] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "refresh_cache-04558d4f-12c4-461a-93f9-64a32618f3d4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.443672] env[61440]: DEBUG nova.compute.manager [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Instance network_info: |[{"id": "47355a96-d316-4624-9612-58d52ddc35e4", "address": "fa:16:3e:86:78:e9", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47355a96-d3", "ovs_interfaceid": "47355a96-d316-4624-9612-58d52ddc35e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2043.443978] env[61440]: 
DEBUG nova.virt.vmwareapi.vmops [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:78:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47355a96-d316-4624-9612-58d52ddc35e4', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2043.452092] env[61440]: DEBUG oslo.service.loopingcall [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2043.452445] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2043.452687] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98b432f1-8a98-477a-a638-a352ad4ef1ff {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.472254] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2043.472254] env[61440]: value = "task-4281409" [ 2043.472254] env[61440]: _type = "Task" [ 2043.472254] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.479485] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281409, 'name': CreateVM_Task} progress is 0%. 
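CreateVM_Task above is not awaited synchronously: the API returns a task handle (task-4281409) that is polled until it finishes, which is what the repeated "progress is N%" records trace. A stand-in for that wait_for_task/_poll_task pattern, assuming a poll() callable that reports ('running'|'success'|'error', payload); oslo.vmware drives the real loop with a looping call at a fixed poll interval:

    import time

    def wait_for_task(poll, interval=0.5, timeout=60):
        # Poll until the task succeeds or fails; re-raise failures to the caller.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, payload = poll()
            if state == "success":
                return payload
            if state == "error":
                raise RuntimeError(payload)
            time.sleep(interval)  # still running; progress could be logged here
        raise TimeoutError("task did not complete in %ss" % timeout)

    states = iter([("running", 0), ("running", 25), ("success", "vm-123")])
    print(wait_for_task(lambda: next(states), interval=0))  # -> vm-123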
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.573068] env[61440]: DEBUG nova.compute.manager [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Received event network-vif-plugged-47355a96-d316-4624-9612-58d52ddc35e4 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2043.573304] env[61440]: DEBUG oslo_concurrency.lockutils [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] Acquiring lock "04558d4f-12c4-461a-93f9-64a32618f3d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.573512] env[61440]: DEBUG oslo_concurrency.lockutils [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] Lock "04558d4f-12c4-461a-93f9-64a32618f3d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.573683] env[61440]: DEBUG oslo_concurrency.lockutils [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] Lock "04558d4f-12c4-461a-93f9-64a32618f3d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.573847] env[61440]: DEBUG nova.compute.manager [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] No waiting events found dispatching network-vif-plugged-47355a96-d316-4624-9612-58d52ddc35e4 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2043.574015] env[61440]: WARNING nova.compute.manager [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Received unexpected event network-vif-plugged-47355a96-d316-4624-9612-58d52ddc35e4 for instance with vm_state building and task_state spawning. [ 2043.574185] env[61440]: DEBUG nova.compute.manager [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Received event network-changed-47355a96-d316-4624-9612-58d52ddc35e4 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2043.574341] env[61440]: DEBUG nova.compute.manager [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Refreshing instance network info cache due to event network-changed-47355a96-d316-4624-9612-58d52ddc35e4. 
{{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2043.574521] env[61440]: DEBUG oslo_concurrency.lockutils [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] Acquiring lock "refresh_cache-04558d4f-12c4-461a-93f9-64a32618f3d4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2043.574655] env[61440]: DEBUG oslo_concurrency.lockutils [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] Acquired lock "refresh_cache-04558d4f-12c4-461a-93f9-64a32618f3d4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2043.574817] env[61440]: DEBUG nova.network.neutron [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Refreshing network info cache for port 47355a96-d316-4624-9612-58d52ddc35e4 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2043.829669] env[61440]: DEBUG nova.network.neutron [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Updated VIF entry in instance network info cache for port 47355a96-d316-4624-9612-58d52ddc35e4. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2043.830168] env[61440]: DEBUG nova.network.neutron [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Updating instance_info_cache with network_info: [{"id": "47355a96-d316-4624-9612-58d52ddc35e4", "address": "fa:16:3e:86:78:e9", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47355a96-d3", "ovs_interfaceid": "47355a96-d316-4624-9612-58d52ddc35e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.839989] env[61440]: DEBUG oslo_concurrency.lockutils [req-1fdf36db-c001-4cee-b152-b6caf094eeda req-7b763553-eed2-4554-b0e4-d64e195179d3 service nova] Releasing lock "refresh_cache-04558d4f-12c4-461a-93f9-64a32618f3d4" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.983084] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281409, 'name': CreateVM_Task} progress is 25%. 
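The network-vif-plugged WARNING above fires because the event arrived while the instance was still building, before anything had registered to wait for it. A simplified model of the pop_instance_event bookkeeping, assuming a plain dict of threading.Events (the real InstanceEvents keys by instance as well as event name and is more careful about races):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # event name -> threading.Event

        def prepare_for(self, name):
            # Called before triggering the action that will emit the event.
            with self._lock:
                return self._waiters.setdefault(name, threading.Event())

        def pop_event(self, name):
            with self._lock:
                ev = self._waiters.pop(name, None)
            if ev is None:
                print("Received unexpected event %s" % name)  # mirrors the WARNING
            else:
                ev.set()  # wakes whoever is blocked on the plug completing

    events = InstanceEvents()
    ev = events.prepare_for("network-vif-plugged-47355a96")
    events.pop_event("network-vif-plugged-47355a96")
    assert ev.is_set()
    events.pop_event("network-changed-47355a96")  # no waiter -> 'unexpected'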
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.482306] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281409, 'name': CreateVM_Task, 'duration_secs': 0.619569} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.482517] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2044.483143] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.483310] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.483619] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2044.483864] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f27531f0-60d0-4937-a907-6abf8e692d40 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.488699] env[61440]: DEBUG oslo_vmware.api [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for the task: (returnval){ [ 2044.488699] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52bcfe9d-1e40-9716-e02c-208f157c79b2" [ 2044.488699] env[61440]: _type = "Task" [ 2044.488699] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.495922] env[61440]: DEBUG oslo_vmware.api [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52bcfe9d-1e40-9716-e02c-208f157c79b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.998969] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2044.999252] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2044.999518] env[61440]: DEBUG oslo_concurrency.lockutils [None req-005752e9-14f4-48e7-b782-1d7d4541a6ec tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.285926] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.275071] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.275071] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2073.275071] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2073.298854] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299167] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299167] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299270] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. 
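The _heal_instance_info_cache pass above skips every instance because all of them are still Building, then reports that nothing needs a cache refresh. The selection reduces to a filter like the following sketch (not the manager's actual code, which also rate-limits and pages through instances):

    def instances_to_heal(instances):
        to_heal = []
        for inst in instances:
            if inst["vm_state"] == "building":
                # "Skipping network cache update for instance because it is Building."
                print("Skipping network cache update for instance %s" % inst["uuid"])
                continue
            to_heal.append(inst)
        if not to_heal:
            print("Didn't find any instances for network info cache update.")
        return to_heal

    instances_to_heal([{"uuid": "04558d4f-12c4-461a-93f9-64a32618f3d4",
                        "vm_state": "building"}])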
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299391] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299528] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299666] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299803] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.299927] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.300058] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2073.300181] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2075.274491] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.274491] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.274491] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2077.274651] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.275736] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.275070] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.274433] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.287051] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.287051] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.287051] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.287215] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2081.288327] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd953810-bc35-4890-8f10-1fe61bfc0175 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.297093] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0908ab14-926e-4fe8-96b3-35c5c1fc7e87 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.310500] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf114376-3dc0-452c-9180-ec3b7a1389e0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.316486] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815f5e6b-625e-4376-9bf9-fc3917cbd13e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.343893] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180619MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2081.343983] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.344192] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.416262] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 53a5db32-d312-488e-8193-df4504736fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.416436] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.416567] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.416692] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.416815] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.416933] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.417068] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.417194] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.417310] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.417421] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
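The per-instance placement allocations listed above sum to the usage in the "Final resource view" record that follows: ten instances at 1 GB disk / 128 MB RAM / 1 vCPU each give used_disk=10GB and used_vcpus=10, and used_ram=1792MB is the 1280 MB of allocations plus the 512 MB host reservation shown in the inventory. An illustrative aggregation (not the ResourceTracker's code):

    from collections import Counter

    def total_usage(allocations):
        usage = Counter()
        for alloc in allocations:
            usage.update(alloc["resources"])
        return dict(usage)

    allocs = [{"resources": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}}] * 10
    print(total_usage(allocs))  # {'DISK_GB': 10, 'MEMORY_MB': 1280, 'VCPU': 10}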
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2081.417646] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2081.417787] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2081.529830] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decc8ddd-0372-4562-bf63-ff9a2ecffed7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.537564] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a9600c-aa58-44c6-971c-3cdd74cb8ad2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.566579] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20222a1f-b2e5-46a1-a680-1277d254ed63 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.573137] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938a9f39-6dbb-4dcf-9610-ad121b0e39e6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.585466] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2081.594879] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2081.608217] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2081.608391] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.603930] env[61440]: DEBUG oslo_service.periodic_task [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.029605] env[61440]: WARNING oslo_vmware.rw_handles [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2089.029605] env[61440]: ERROR oslo_vmware.rw_handles [ 2089.030213] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2089.032489] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2089.032755] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Copying Virtual Disk [datastore2] vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/b087e8c8-9835-41f7-8096-ef4d8761b486/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2089.033060] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25c5ca23-31a5-4a7a-b01e-d9bf83daf43a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.041862] env[61440]: DEBUG oslo_vmware.api [None 
req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for the task: (returnval){ [ 2089.041862] env[61440]: value = "task-4281410" [ 2089.041862] env[61440]: _type = "Task" [ 2089.041862] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.050655] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': task-4281410, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.553019] env[61440]: DEBUG oslo_vmware.exceptions [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2089.553366] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2089.553922] env[61440]: ERROR nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2089.553922] env[61440]: Faults: ['InvalidArgument'] [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] Traceback (most recent call last): [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] yield resources [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self.driver.spawn(context, instance, image_meta, [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self._fetch_image_if_missing(context, vi) [ 2089.553922] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] image_cache(vi, tmp_image_ds_loc) [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] vm_util.copy_virtual_disk( [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] session._wait_for_task(vmdk_copy_task) [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] return self.wait_for_task(task_ref) [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] return evt.wait() [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] result = hub.switch() [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2089.554468] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] return self.greenlet.switch() [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self.f(*self.args, **self.kw) [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] raise exceptions.translate_fault(task_info.error) [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] Faults: ['InvalidArgument'] [ 2089.554830] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] [ 2089.554830] env[61440]: INFO nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 
53a5db32-d312-488e-8193-df4504736fc7] Terminating instance [ 2089.555848] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2089.556088] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2089.556337] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e113aee4-1c00-4645-a4b0-6e8dadc1a611 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.558573] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2089.558777] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2089.559524] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891e9767-bbe4-48c1-a630-17a46ad117cd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.567032] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2089.567032] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d8fc74f-25a1-4cea-93ff-cb2d3347e904 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.568852] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2089.569049] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2089.570051] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58a176db-253c-4228-9152-4e83fb474f7a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.575031] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Waiting for the task: (returnval){ [ 2089.575031] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]521cad96-4511-a7b8-8b39-fef588715b92" [ 2089.575031] env[61440]: _type = "Task" [ 2089.575031] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.581919] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]521cad96-4511-a7b8-8b39-fef588715b92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.630662] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2089.630891] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2089.631084] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Deleting the datastore file [datastore2] 53a5db32-d312-488e-8193-df4504736fc7 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2089.631349] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b8bb150-2b0d-443e-9918-e5095e835eea {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.638284] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for the task: (returnval){ [ 2089.638284] env[61440]: value = "task-4281412" [ 2089.638284] env[61440]: _type = "Task" [ 2089.638284] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.646987] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': task-4281412, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.087074] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2090.087368] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Creating directory with path [datastore2] vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2090.087589] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d628127-9832-437b-b820-498e3c893572 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.099465] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Created directory with path [datastore2] vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2090.099662] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Fetch image to [datastore2] vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2090.099842] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2090.100584] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6085c8-aa8d-4eda-81cc-30b68d2e9076 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.107034] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f1b265-b613-4c06-a720-6abf01298ab6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.115962] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef584996-26ae-463e-a001-3ab1b30d2de8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.150463] env[61440]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d05d202-ec55-443a-a9c5-99b054f1f368 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.157171] env[61440]: DEBUG oslo_vmware.api [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': task-4281412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077198} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.158606] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2090.158798] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2090.158978] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2090.159171] env[61440]: INFO nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Took 0.60 seconds to destroy the instance on the hypervisor. 
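The `Invoking <Manager>.<Method>_Task` / `Waiting for the task: (returnval){...}` / `progress is 0%` sequences above (SearchDatastore_Task, DeleteDatastoreFile_Task) are the standard oslo.vmware invoke-then-poll workflow: the SOAP call returns a task reference immediately, and the caller polls it to a terminal state. A minimal sketch of that loop, assuming a hypothetical get_task_info() helper in place of the PropertyCollector read that the real wait_for_task/_poll_task pair in oslo_vmware/api.py performs on each tick (the real implementation drives this through a looping call rather than a plain while loop):

    import time

    class TaskFailed(Exception):
        """Terminal error state reported by the vCenter task."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter task until it succeeds, fails, or times out.

        get_task_info is a stand-in callable returning a dict such as
        {"state": "running", "progress": 40}; it is illustrative, not
        part of the oslo.vmware API.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # one PropertyCollector round-trip per tick
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # oslo.vmware translates the fault at this point
                # (translate_fault in api.py), which is where the
                # VimFaultException seen later in this log originates.
                raise TaskFailed(info.get("error"))
            # 'queued' or 'running': report progress and poll again,
            # matching the "progress is 0%" lines above.
            print("progress is %s%%" % (info.get("progress") or 0))
            time.sleep(poll_interval)
        raise TimeoutError("task did not reach a terminal state")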
[ 2090.161627] env[61440]: DEBUG nova.compute.claims [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2090.161801] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.162030] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.164568] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c68d77bd-2207-4c7a-9279-101837ce6314 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.184634] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2090.306073] env[61440]: DEBUG oslo_vmware.rw_handles [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2090.366319] env[61440]: DEBUG oslo_vmware.rw_handles [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2090.366508] env[61440]: DEBUG oslo_vmware.rw_handles [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2090.374564] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4355d915-a168-4e3f-8ea1-2b644653358e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.382323] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ba3dca-4b61-4d41-a2f8-f35ade8e96e7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.125508] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc70cc31-6906-4684-8b52-a047fa0438c1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.133314] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3692c47-ded0-493a-b7cd-d0a64b320b76 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.145926] env[61440]: DEBUG nova.compute.provider_tree [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2091.158644] env[61440]: DEBUG nova.scheduler.client.report [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2091.173392] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.011s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.173939] env[61440]: ERROR nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2091.173939] env[61440]: Faults: ['InvalidArgument'] [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] Traceback (most recent call last): [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2091.173939] env[61440]: ERROR 
nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self.driver.spawn(context, instance, image_meta, [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self._fetch_image_if_missing(context, vi) [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] image_cache(vi, tmp_image_ds_loc) [ 2091.173939] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] vm_util.copy_virtual_disk( [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] session._wait_for_task(vmdk_copy_task) [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] return self.wait_for_task(task_ref) [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] return evt.wait() [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] result = hub.switch() [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] return self.greenlet.switch() [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2091.174276] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] self.f(*self.args, **self.kw) [ 2091.174568] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2091.174568] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] raise exceptions.translate_fault(task_info.error) [ 2091.174568] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2091.174568] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] Faults: ['InvalidArgument'] [ 2091.174568] env[61440]: ERROR nova.compute.manager [instance: 53a5db32-d312-488e-8193-df4504736fc7] [ 2091.174704] env[61440]: DEBUG nova.compute.utils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2091.176188] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Build of instance 53a5db32-d312-488e-8193-df4504736fc7 was re-scheduled: A specified parameter was not correct: fileType [ 2091.176188] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2091.176572] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2091.176748] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2091.176917] env[61440]: DEBUG nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2091.177090] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2091.724942] env[61440]: DEBUG nova.network.neutron [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.739720] env[61440]: INFO nova.compute.manager [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Took 0.56 seconds to deallocate network for instance. [ 2091.839979] env[61440]: INFO nova.scheduler.client.report [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Deleted allocations for instance 53a5db32-d312-488e-8193-df4504736fc7 [ 2091.871053] env[61440]: DEBUG oslo_concurrency.lockutils [None req-95f1a316-1871-4fd2-8c1d-2533a66f32f8 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "53a5db32-d312-488e-8193-df4504736fc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 684.122s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.871190] env[61440]: DEBUG oslo_concurrency.lockutils [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "53a5db32-d312-488e-8193-df4504736fc7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 488.399s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.871360] env[61440]: DEBUG oslo_concurrency.lockutils [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquiring lock "53a5db32-d312-488e-8193-df4504736fc7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.872032] env[61440]: DEBUG oslo_concurrency.lockutils [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "53a5db32-d312-488e-8193-df4504736fc7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.872032] env[61440]: DEBUG oslo_concurrency.lockutils [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "53a5db32-d312-488e-8193-df4504736fc7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.874087] env[61440]: INFO nova.compute.manager [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Terminating instance [ 2091.875986] env[61440]: DEBUG nova.compute.manager [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2091.876094] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2091.876638] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d619de7c-c4fb-4144-b4d5-d87bb9df7a69 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.887293] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b20aeb4-e0f8-4565-b65a-06db11871cf1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.916401] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 53a5db32-d312-488e-8193-df4504736fc7 could not be found. [ 2091.916612] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2091.916791] env[61440]: INFO nova.compute.manager [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2091.917047] env[61440]: DEBUG oslo.service.loopingcall [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2091.917269] env[61440]: DEBUG nova.compute.manager [-] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2091.917383] env[61440]: DEBUG nova.network.neutron [-] [instance: 53a5db32-d312-488e-8193-df4504736fc7] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2091.943810] env[61440]: DEBUG nova.network.neutron [-] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.953046] env[61440]: INFO nova.compute.manager [-] [instance: 53a5db32-d312-488e-8193-df4504736fc7] Took 0.04 seconds to deallocate network for instance. [ 2092.046674] env[61440]: DEBUG oslo_concurrency.lockutils [None req-50181713-6aff-44dc-8027-71c19d52a507 tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Lock "53a5db32-d312-488e-8193-df4504736fc7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.047832] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "53a5db32-d312-488e-8193-df4504736fc7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 318.002s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.048105] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 53a5db32-d312-488e-8193-df4504736fc7] During sync_power_state the instance has a pending task (deleting). Skip. 
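The Acquiring / acquired (waited Ns) / "released" (held Ns) triples that bracket each step come from oslo.concurrency's lockutils (the inner wrapper at lockutils.py:402/407/421 in the paths above). The long durations are real serialization, not hangs: do_terminate_instance waited 488.399s because _locked_do_build_and_run_instance held the per-instance lock for the whole 684.122s failed build. A minimal sketch of the two usage forms that produce these lines, with stand-in bodies (lockutils.synchronized and lockutils.lock are the real oslo.concurrency entry points; the surrounding functions are illustrative only, not Nova's code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(instance_uuid):
        # Runs with the "compute_resources" lock held; the decorator's
        # inner wrapper emits the Acquiring / acquired (waited) /
        # released (held) DEBUG lines seen throughout this log.
        print("aborting claim for %s" % instance_uuid)

    def clear_events_for_instance(instance_uuid):
        # Context-manager form, as used for the per-instance "-events"
        # lock in the terminate path above.
        with lockutils.lock(instance_uuid + "-events"):
            pass  # drain pending external events while holding the lock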
[ 2092.048411] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "53a5db32-d312-488e-8193-df4504736fc7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.269287] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.275152] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.273911] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.274131] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2135.274261] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2135.293587] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.293944] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.293944] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294042] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294105] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294227] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294349] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294466] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294582] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2135.294699] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2135.295184] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.274900] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.275146] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2139.275972] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.276276] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.463906] env[61440]: WARNING oslo_vmware.rw_handles [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2139.463906] env[61440]: ERROR oslo_vmware.rw_handles [ 2139.464375] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2139.467309] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2139.467620] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Copying Virtual Disk [datastore2] vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] 
vmware_temp/23c6a0f4-cb9b-4b4a-b75a-560147ebfd80/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2139.467949] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d32f26e2-da9f-4660-b594-6b9836b42fe0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.475770] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Waiting for the task: (returnval){ [ 2139.475770] env[61440]: value = "task-4281413" [ 2139.475770] env[61440]: _type = "Task" [ 2139.475770] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.483093] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Task: {'id': task-4281413, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.986216] env[61440]: DEBUG oslo_vmware.exceptions [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2139.986379] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.986959] env[61440]: ERROR nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2139.986959] env[61440]: Faults: ['InvalidArgument'] [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Traceback (most recent call last): [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] yield resources [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self.driver.spawn(context, instance, image_meta, [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self._fetch_image_if_missing(context, vi) [ 2139.986959] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] image_cache(vi, tmp_image_ds_loc) [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] vm_util.copy_virtual_disk( [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] session._wait_for_task(vmdk_copy_task) [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] return self.wait_for_task(task_ref) [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] return evt.wait() [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] result = hub.switch() [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2139.987309] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] return self.greenlet.switch() [ 2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self.f(*self.args, **self.kw) [ 2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] raise exceptions.translate_fault(task_info.error) [ 
2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Faults: ['InvalidArgument'] [ 2139.987633] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] [ 2139.987633] env[61440]: INFO nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Terminating instance [ 2139.988866] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2139.989085] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2139.989331] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d8a5824-1420-41fe-a3be-b584a30bb067 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.991505] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2139.991695] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2139.992436] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323b93d4-33c8-456c-bdb6-304618ceb6b8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.998901] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2139.999127] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-def0376d-31a4-49e5-b542-49ba4ebaa5b5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.001281] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2140.001457] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2140.002408] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b71dbc6-7922-498b-a8a3-4de6d74e9a78 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.006914] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Waiting for the task: (returnval){ [ 2140.006914] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52cb1749-9d8f-42b0-0159-33c6316b63c3" [ 2140.006914] env[61440]: _type = "Task" [ 2140.006914] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.020801] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2140.021077] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Creating directory with path [datastore2] vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2140.021297] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3c4ad5b-bc6b-499c-b331-03764d72a90d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.041158] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Created directory with path [datastore2] vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2140.041345] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Fetch image to [datastore2] vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2140.041505] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2140.042255] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d490523-b19a-463c-b25b-4ad3b93f569f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.049015] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3441016e-96ad-4428-a3dc-bdf84fb80975 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.057810] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dff23e-624b-4007-8232-ca50b6cf48f0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.088668] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c1707b-483c-4f02-b00e-870ae732e236 {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.091148] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2140.091347] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2140.091523] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Deleting the datastore file [datastore2] 9f2d4b43-f7ef-401b-a63d-844e113b7142 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2140.091744] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccc5f3f0-b4bb-47d6-b4fb-5396bf103a6e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.096655] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-586b4158-40e4-434b-8e82-b16521bbf7c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.099342] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Waiting for the task: (returnval){ [ 2140.099342] env[61440]: value = "task-4281415" [ 2140.099342] env[61440]: _type = "Task" [ 2140.099342] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.106219] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Task: {'id': task-4281415, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.116136] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2140.232240] env[61440]: DEBUG oslo_vmware.rw_handles [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2140.290270] env[61440]: DEBUG oslo_vmware.rw_handles [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2140.290531] env[61440]: DEBUG oslo_vmware.rw_handles [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2140.610561] env[61440]: DEBUG oslo_vmware.api [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Task: {'id': task-4281415, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074471} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.610878] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2140.611100] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2140.611283] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2140.611459] env[61440]: INFO nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Took 0.62 seconds to destroy the instance on the hypervisor. [ 2140.613501] env[61440]: DEBUG nova.compute.claims [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2140.613671] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2140.613902] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2140.776330] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ca5037-d98c-41cf-b789-bfef35fd2856 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.783718] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dc0f38-91a9-4871-a4d1-1044308db6be {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.813016] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e41eaf0-95b4-473f-b886-d928d06819fc {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.819673] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25f73a3-974b-48bd-9ac6-3a70807d6c8a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.832299] env[61440]: DEBUG nova.compute.provider_tree [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2140.841686] env[61440]: DEBUG nova.scheduler.client.report [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2140.855422] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.241s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.855993] env[61440]: ERROR nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2140.855993] env[61440]: Faults: ['InvalidArgument'] [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Traceback (most recent call last): [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self.driver.spawn(context, instance, image_meta, [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] 
self._fetch_image_if_missing(context, vi) [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] image_cache(vi, tmp_image_ds_loc) [ 2140.855993] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] vm_util.copy_virtual_disk( [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] session._wait_for_task(vmdk_copy_task) [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] return self.wait_for_task(task_ref) [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] return evt.wait() [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] result = hub.switch() [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] return self.greenlet.switch() [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2140.856414] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] self.f(*self.args, **self.kw) [ 2140.856742] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2140.856742] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] raise exceptions.translate_fault(task_info.error) [ 2140.856742] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2140.856742] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Faults: ['InvalidArgument'] [ 2140.856742] env[61440]: ERROR nova.compute.manager [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] [ 2140.856742] env[61440]: DEBUG nova.compute.utils [None 
req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2140.858565] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Build of instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 was re-scheduled: A specified parameter was not correct: fileType [ 2140.858565] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2140.858565] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2140.858755] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2140.858850] env[61440]: DEBUG nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2140.859022] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2141.247536] env[61440]: DEBUG nova.network.neutron [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2141.261504] env[61440]: INFO nova.compute.manager [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Took 0.40 seconds to deallocate network for instance. 
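
The traceback above bottoms out in oslo_vmware's task polling: wait_for_task() blocks on an event that a looping call fills in by repeatedly polling the vCenter task, and _poll_task() translates an error result into a raised exception (here the 'fileType' InvalidArgument fault from the CopyVirtualDisk_Task). The following is a minimal sketch of that poll-and-translate pattern, using hypothetical names and task shapes rather than oslo.vmware's real internals:

    # Minimal sketch of the poll-and-translate pattern the traceback walks
    # through (hypothetical names and task dicts, not oslo.vmware internals).
    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        # Poll a task until it leaves the running state; an 'error' result
        # is translated into an exception, mirroring
        # "raise exceptions.translate_fault(task_info.error)" in _poll_task.
        while True:
            info = get_task_info()  # in the log: a PropertyCollector round trip
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise VimFaultException(info['faults'], info['message'])
            time.sleep(interval)    # still queued/running: poll again

    # A task failing the same way as the CopyVirtualDisk_Task above:
    states = iter([
        {'state': 'running'},
        {'state': 'error', 'faults': ['InvalidArgument'],
         'message': 'A specified parameter was not correct: fileType'},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc, exc.fault_list)

Under this shape the copy failure surfaces in the spawn path as a VimFaultException, which matches what the log shows next: the claim is aborted, the build is re-scheduled, and the network is deallocated.
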
[ 2141.274466] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.361685] env[61440]: INFO nova.scheduler.client.report [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Deleted allocations for instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 [ 2141.384365] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5977e0a6-2d59-4881-9d08-fb7fe8231bee tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 690.226s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.384619] env[61440]: DEBUG oslo_concurrency.lockutils [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 494.361s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.384841] env[61440]: DEBUG oslo_concurrency.lockutils [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Acquiring lock "9f2d4b43-f7ef-401b-a63d-844e113b7142-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.385153] env[61440]: DEBUG oslo_concurrency.lockutils [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.385381] env[61440]: DEBUG oslo_concurrency.lockutils [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.387948] env[61440]: INFO nova.compute.manager [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Terminating instance [ 2141.389858] env[61440]: DEBUG nova.compute.manager [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 
9f2d4b43-f7ef-401b-a63d-844e113b7142] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2141.390074] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2141.390617] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02bd0f07-925a-43b6-aa3a-896f84de1f4f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.400900] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0014d47-24f3-4166-9d24-5ded598693fa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.431217] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9f2d4b43-f7ef-401b-a63d-844e113b7142 could not be found. [ 2141.431424] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2141.431988] env[61440]: INFO nova.compute.manager [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2141.431988] env[61440]: DEBUG oslo.service.loopingcall [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2141.432159] env[61440]: DEBUG nova.compute.manager [-] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2141.432243] env[61440]: DEBUG nova.network.neutron [-] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2141.456571] env[61440]: DEBUG nova.network.neutron [-] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2141.465936] env[61440]: INFO nova.compute.manager [-] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] Took 0.03 seconds to deallocate network for instance. 
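
Note that this terminate path is idempotent: the build of 9f2d4b43 had already failed and been cleaned up, so SearchIndex.FindAllByUuid finds no VM, InstanceNotFound is downgraded to the WARNING above, and teardown still proceeds to network deallocation. A rough sketch of that tolerate-missing-backend shape (hypothetical helpers, not the actual nova.virt.vmwareapi.vmops code):

    # Rough sketch of a destroy that tolerates a missing backend VM
    # (hypothetical helpers; the real logic lives in nova's vmops/manager).
    class InstanceNotFound(Exception):
        pass

    def destroy_instance(find_vm_ref, destroy_vm, deallocate_network, uuid):
        # Destroy the VM if the backend still has it; a missing VM is logged
        # and treated as already destroyed, and teardown continues regardless.
        try:
            vm_ref = find_vm_ref(uuid)  # SearchIndex.FindAllByUuid in the log
            destroy_vm(vm_ref)
        except InstanceNotFound:
            print(f'WARNING: instance {uuid} does not exist on backend')
        deallocate_network(uuid)        # network is deallocated either way

    def _missing(uuid):
        raise InstanceNotFound(uuid)

    destroy_instance(
        _missing,
        lambda ref: None,
        lambda uuid: print(f'deallocated network for {uuid}'),
        '9f2d4b43-f7ef-401b-a63d-844e113b7142')
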
[ 2141.590873] env[61440]: DEBUG oslo_concurrency.lockutils [None req-4734573c-d682-436a-9912-a2a4f3b75f74 tempest-InstanceActionsNegativeTestJSON-1803095332 tempest-InstanceActionsNegativeTestJSON-1803095332-project-member] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.206s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.593703] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 367.546s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.593703] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 9f2d4b43-f7ef-401b-a63d-844e113b7142] During sync_power_state the instance has a pending task (deleting). Skip. [ 2141.593703] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "9f2d4b43-f7ef-401b-a63d-844e113b7142" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.273963] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.285571] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.285797] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.285963] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.286135] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2142.287336] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0af9f6-3875-48d1-bc89-866dd2409df9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.296167] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-80b4693b-7569-46b2-b6a8-9960fe50912b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.310091] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9c0add-6701-4197-87c6-e53e80bc5798 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.316408] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad603bd-3817-493e-b384-697e56f5e0c7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.346155] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180655MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2142.346323] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.346530] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.419816] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420087] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420266] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420338] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420450] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420573] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420698] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.420844] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2142.421046] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2142.421185] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=183GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2142.525029] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c1ee66-3e87-4d13-a021-89981adbd8c9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.532750] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d5cdad-7c07-47e6-a4dc-73757dcf9d9b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.563031] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e393ed-8daa-4467-a93e-30385187ae65 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.569040] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae8879b-1de2-4830-b930-9e08ebae59d4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.581679] env[61440]: DEBUG nova.compute.provider_tree 
[None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.592172] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2142.609133] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2142.609522] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.263s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.082449] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "2b6d953f-c3e5-4671-9eac-61523a169b99" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.082449] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "2b6d953f-c3e5-4671-9eac-61523a169b99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.092500] env[61440]: DEBUG nova.compute.manager [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2143.142825] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.143095] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.144661] env[61440]: INFO nova.compute.claims [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2143.303995] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c41fb05-829e-4c20-97d7-2ffc73e1b21f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.311233] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8038e6dd-447a-448f-bc71-46f66e457583 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.340104] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7ae188-e69a-446d-a2b4-782c75a58454 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.347612] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675278de-44c5-4987-ad32-f824001485a0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.360096] env[61440]: DEBUG nova.compute.provider_tree [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2143.368845] env[61440]: DEBUG nova.scheduler.client.report [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2143.382843] env[61440]: DEBUG oslo_concurrency.lockutils 
[None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.240s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.383307] env[61440]: DEBUG nova.compute.manager [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2143.414891] env[61440]: DEBUG nova.compute.utils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2143.416270] env[61440]: DEBUG nova.compute.manager [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2143.416440] env[61440]: DEBUG nova.network.neutron [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2143.424400] env[61440]: DEBUG nova.compute.manager [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2143.477696] env[61440]: DEBUG nova.policy [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02aa958bbbdc49ab8d494fe9afc40779', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a38fb630f3e41acbcd97dee8e89aba8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}} [ 2143.488767] env[61440]: DEBUG nova.compute.manager [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Start spawning the instance on the hypervisor. 
{{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2143.513315] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T01:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2143.513575] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2143.513737] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2143.513920] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2143.514083] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2143.514240] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2143.514448] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2143.514611] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2143.514777] env[61440]: DEBUG 
nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2143.514944] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2143.515134] env[61440]: DEBUG nova.virt.hardware [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2143.515984] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c1b02e-2a10-4698-ac0d-37bc36f14f53 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.525410] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4ce715-5b2d-4761-9408-0e862c3ab844 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.069239] env[61440]: DEBUG nova.network.neutron [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Successfully created port: 97cc82ca-cd71-4dc4-986d-ad226e22675f {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2144.581652] env[61440]: DEBUG nova.compute.manager [req-d20c6e75-7ae4-4931-aa73-4e1d2af3fd94 req-b846015d-5247-442f-9f2b-cf8c44408091 service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Received event network-vif-plugged-97cc82ca-cd71-4dc4-986d-ad226e22675f {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2144.581891] env[61440]: DEBUG oslo_concurrency.lockutils [req-d20c6e75-7ae4-4931-aa73-4e1d2af3fd94 req-b846015d-5247-442f-9f2b-cf8c44408091 service nova] Acquiring lock "2b6d953f-c3e5-4671-9eac-61523a169b99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.582090] env[61440]: DEBUG oslo_concurrency.lockutils [req-d20c6e75-7ae4-4931-aa73-4e1d2af3fd94 req-b846015d-5247-442f-9f2b-cf8c44408091 service nova] Lock "2b6d953f-c3e5-4671-9eac-61523a169b99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.582269] env[61440]: DEBUG oslo_concurrency.lockutils [req-d20c6e75-7ae4-4931-aa73-4e1d2af3fd94 req-b846015d-5247-442f-9f2b-cf8c44408091 service nova] Lock "2b6d953f-c3e5-4671-9eac-61523a169b99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.582438] env[61440]: DEBUG 
nova.compute.manager [req-d20c6e75-7ae4-4931-aa73-4e1d2af3fd94 req-b846015d-5247-442f-9f2b-cf8c44408091 service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] No waiting events found dispatching network-vif-plugged-97cc82ca-cd71-4dc4-986d-ad226e22675f {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2144.582602] env[61440]: WARNING nova.compute.manager [req-d20c6e75-7ae4-4931-aa73-4e1d2af3fd94 req-b846015d-5247-442f-9f2b-cf8c44408091 service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Received unexpected event network-vif-plugged-97cc82ca-cd71-4dc4-986d-ad226e22675f for instance with vm_state building and task_state spawning. [ 2144.604626] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.664630] env[61440]: DEBUG nova.network.neutron [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Successfully updated port: 97cc82ca-cd71-4dc4-986d-ad226e22675f {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2144.678384] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "refresh_cache-2b6d953f-c3e5-4671-9eac-61523a169b99" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.678492] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "refresh_cache-2b6d953f-c3e5-4671-9eac-61523a169b99" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.678629] env[61440]: DEBUG nova.network.neutron [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2144.719350] env[61440]: DEBUG nova.network.neutron [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Instance cache missing network info. 
{{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2144.917251] env[61440]: DEBUG nova.network.neutron [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Updating instance_info_cache with network_info: [{"id": "97cc82ca-cd71-4dc4-986d-ad226e22675f", "address": "fa:16:3e:1b:3c:34", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97cc82ca-cd", "ovs_interfaceid": "97cc82ca-cd71-4dc4-986d-ad226e22675f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.935085] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "refresh_cache-2b6d953f-c3e5-4671-9eac-61523a169b99" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.935401] env[61440]: DEBUG nova.compute.manager [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Instance network_info: |[{"id": "97cc82ca-cd71-4dc4-986d-ad226e22675f", "address": "fa:16:3e:1b:3c:34", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97cc82ca-cd", "ovs_interfaceid": "97cc82ca-cd71-4dc4-986d-ad226e22675f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2144.935856] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:3c:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '97cc82ca-cd71-4dc4-986d-ad226e22675f', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2144.943808] env[61440]: DEBUG oslo.service.loopingcall [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2144.944309] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2144.944569] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81d563f9-55f3-4e57-9815-f140cf124c9f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.964746] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2144.964746] env[61440]: value = "task-4281416" [ 2144.964746] env[61440]: _type = "Task" [ 2144.964746] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.972530] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281416, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.475681] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281416, 'name': CreateVM_Task, 'duration_secs': 0.344631} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.475859] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2145.476551] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.476718] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.477079] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2145.477333] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e9fc5ca-543f-4dd5-9640-e4fa184dffa5 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.481686] env[61440]: DEBUG oslo_vmware.api [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 2145.481686] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]528a9443-e2af-13c6-3c7a-a64e42f1a734" [ 2145.481686] env[61440]: _type = "Task" [ 2145.481686] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.489098] env[61440]: DEBUG oslo_vmware.api [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]528a9443-e2af-13c6-3c7a-a64e42f1a734, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.991838] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.992200] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2145.992323] env[61440]: DEBUG oslo_concurrency.lockutils [None req-d556d4f4-220f-4a5d-bd80-7b556debb726 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.612661] env[61440]: DEBUG nova.compute.manager [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Received event network-changed-97cc82ca-cd71-4dc4-986d-ad226e22675f {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2146.612863] env[61440]: DEBUG nova.compute.manager [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Refreshing instance network info cache due to event network-changed-97cc82ca-cd71-4dc4-986d-ad226e22675f. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2146.613088] env[61440]: DEBUG oslo_concurrency.lockutils [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] Acquiring lock "refresh_cache-2b6d953f-c3e5-4671-9eac-61523a169b99" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.613268] env[61440]: DEBUG oslo_concurrency.lockutils [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] Acquired lock "refresh_cache-2b6d953f-c3e5-4671-9eac-61523a169b99" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.613440] env[61440]: DEBUG nova.network.neutron [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Refreshing network info cache for port 97cc82ca-cd71-4dc4-986d-ad226e22675f {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2146.870708] env[61440]: DEBUG nova.network.neutron [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Updated VIF entry in instance network info cache for port 97cc82ca-cd71-4dc4-986d-ad226e22675f. 
{{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2146.871112] env[61440]: DEBUG nova.network.neutron [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Updating instance_info_cache with network_info: [{"id": "97cc82ca-cd71-4dc4-986d-ad226e22675f", "address": "fa:16:3e:1b:3c:34", "network": {"id": "6a5089bf-3ea9-496b-8663-befccab3b1fc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1500060981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a38fb630f3e41acbcd97dee8e89aba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97cc82ca-cd", "ovs_interfaceid": "97cc82ca-cd71-4dc4-986d-ad226e22675f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.880146] env[61440]: DEBUG oslo_concurrency.lockutils [req-587d8518-84bb-4834-8688-f4a02c6d4236 req-7aa3de58-8655-4f0f-821d-27ee162c887e service nova] Releasing lock "refresh_cache-2b6d953f-c3e5-4671-9eac-61523a169b99" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.663267] env[61440]: WARNING oslo_vmware.rw_handles [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2185.663267] env[61440]: ERROR oslo_vmware.rw_handles [ 2185.663910] env[61440]: DEBUG nova.virt.vmwareapi.images [None 
req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2185.665672] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2185.665943] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Copying Virtual Disk [datastore2] vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/cc93d737-f646-4498-a046-4910f630461b/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2185.666250] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b52a8d3-212a-4e2e-99b7-12ae51b0bd9b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.673823] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Waiting for the task: (returnval){ [ 2185.673823] env[61440]: value = "task-4281417" [ 2185.673823] env[61440]: _type = "Task" [ 2185.673823] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.681360] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Task: {'id': task-4281417, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.184269] env[61440]: DEBUG oslo_vmware.exceptions [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2186.184553] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.185116] env[61440]: ERROR nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2186.185116] env[61440]: Faults: ['InvalidArgument'] [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Traceback (most recent call last): [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] yield resources [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self.driver.spawn(context, instance, image_meta, [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self._fetch_image_if_missing(context, vi) [ 2186.185116] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] image_cache(vi, tmp_image_ds_loc) [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] vm_util.copy_virtual_disk( [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] session._wait_for_task(vmdk_copy_task) [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] return self.wait_for_task(task_ref) [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] return evt.wait() [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] result = hub.switch() [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2186.185545] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] return self.greenlet.switch() [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self.f(*self.args, **self.kw) [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] raise exceptions.translate_fault(task_info.error) [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Faults: ['InvalidArgument'] [ 2186.185992] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] [ 2186.185992] env[61440]: INFO nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Terminating instance [ 2186.186978] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.187215] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2186.187450] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ceb1397e-de0f-4c31-a282-42edaa8eac4b {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.189535] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2186.189730] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2186.190471] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb24e1c-8552-4bca-8e51-c1a1a7c8839c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.197204] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2186.198174] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74a1c956-51c3-45af-a1b4-95a3fc55b556 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.199483] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2186.199658] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2186.200350] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46b90683-b059-4b91-8b62-fc15c1f96c07 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.204834] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Waiting for the task: (returnval){ [ 2186.204834] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ad8855-8f6b-5ec8-fd0a-1385eb174119" [ 2186.204834] env[61440]: _type = "Task" [ 2186.204834] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.211747] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ad8855-8f6b-5ec8-fd0a-1385eb174119, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.263062] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2186.263062] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2186.263062] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Deleting the datastore file [datastore2] e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2186.263305] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7df838eb-e905-427c-ad84-4e258130aafa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.269570] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Waiting for the task: (returnval){ [ 2186.269570] env[61440]: value = "task-4281419" [ 2186.269570] env[61440]: _type = "Task" [ 2186.269570] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.276817] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Task: {'id': task-4281419, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.715186] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2186.715465] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Creating directory with path [datastore2] vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2186.715688] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49851a52-ccb2-4b4c-bcb3-fa3995c988ad {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.726954] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Created directory with path [datastore2] vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2186.727121] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Fetch image to [datastore2] vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2186.727295] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2186.728029] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba134945-8f5c-4d32-abc6-f990580d663e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.734486] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd3d8de-3a8b-4643-bf79-0845303e852e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.743209] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9d2344-d07d-4566-9830-4c2eea124a58 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.776574] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e186cbb9-2390-4a4e-bb0a-bfc549416fe2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.782782] env[61440]: DEBUG oslo_vmware.api [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Task: {'id': task-4281419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086036} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.784145] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2186.784354] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2186.784522] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2186.784696] env[61440]: INFO nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Took 0.59 seconds to destroy the instance on the hypervisor. 
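The InvalidArgument/fileType failure above originates in the image-cache step: after streaming the image to tmp-sparse.vmdk, Nova copies it into the cached image path via VirtualDiskManager.CopyVirtualDisk_Task, and the fault surfaces when the polled vCenter task completes with an error. A minimal Python sketch of that call path, condensed from the traceback shown above (the session object, datacenter reference, and datastore paths are placeholders for illustration, not verbatim Nova code):

    from oslo_vmware import exceptions as vexc

    def cache_sparse_image(session, dc_ref, tmp_path, cached_path):
        # Issue the server-side disk copy, e.g. from
        #   [datastore2] vmware_temp/.../tmp-sparse.vmdk
        # to
        #   [datastore2] vmware_temp/.../5a2cfd76-...-0d55e896c6a5.vmdk
        copy_task = session._call_method(
            session.vim, "CopyVirtualDisk_Task",
            session.vim.service_content.virtualDiskManager,
            sourceName=tmp_path,
            sourceDatacenter=dc_ref,
            destName=cached_path)
        try:
            # Poll the vCenter task; a failed task is translated into a
            # VimFaultException (see _poll_task / translate_fault in the
            # traceback above).
            session._wait_for_task(copy_task)
        except vexc.VimFaultException:
            # This is the branch taken in the log: vCenter rejects the copy
            # with Faults: ['InvalidArgument'] ("A specified parameter was
            # not correct: fileType"), spawn aborts, and the instance is
            # destroyed and re-scheduled.
            raise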
[ 2186.786466] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ba553b45-718d-4dc3-843c-fe77c75582bd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.789457] env[61440]: DEBUG nova.compute.claims [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2186.789631] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.789864] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.809883] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2186.899452] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2186.958811] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2186.959011] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2187.010903] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0f8f43-936a-43e9-a01e-5a99d3bb1548 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.018558] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6e9b77-8f1b-4d39-9e32-fdb75f571242 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.046842] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1120c2e-1d1f-4e02-91f0-4aab7aef519a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.053417] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c826e8-1b63-4b07-acaa-64c195828404 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.066634] env[61440]: DEBUG nova.compute.provider_tree [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2187.077451] env[61440]: DEBUG nova.scheduler.client.report [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2187.091345] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.301s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.091856] env[61440]: ERROR nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2187.091856] env[61440]: Faults: ['InvalidArgument'] [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Traceback (most recent call last): [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2187.091856] env[61440]: ERROR nova.compute.manager 
[instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self.driver.spawn(context, instance, image_meta, [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self._fetch_image_if_missing(context, vi) [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] image_cache(vi, tmp_image_ds_loc) [ 2187.091856] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] vm_util.copy_virtual_disk( [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] session._wait_for_task(vmdk_copy_task) [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] return self.wait_for_task(task_ref) [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] return evt.wait() [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] result = hub.switch() [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] return self.greenlet.switch() [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2187.092297] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] self.f(*self.args, **self.kw) [ 2187.092773] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2187.092773] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] raise exceptions.translate_fault(task_info.error) [ 2187.092773] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2187.092773] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Faults: ['InvalidArgument'] [ 2187.092773] env[61440]: ERROR nova.compute.manager [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] [ 2187.092773] env[61440]: DEBUG nova.compute.utils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2187.093868] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Build of instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 was re-scheduled: A specified parameter was not correct: fileType [ 2187.093868] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2187.094262] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2187.094431] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2187.094598] env[61440]: DEBUG nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2187.094774] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2187.457322] env[61440]: DEBUG nova.network.neutron [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.472842] env[61440]: INFO nova.compute.manager [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Took 0.38 seconds to deallocate network for instance. [ 2187.587685] env[61440]: INFO nova.scheduler.client.report [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Deleted allocations for instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 [ 2187.645776] env[61440]: DEBUG oslo_concurrency.lockutils [None req-19512b83-c93e-4725-9405-24154ba8a9be tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 687.653s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.645776] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 491.400s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.645776] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Acquiring lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.646036] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.646285] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.648440] env[61440]: INFO nova.compute.manager [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Terminating instance [ 2187.650425] env[61440]: DEBUG nova.compute.manager [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2187.650621] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2187.651145] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39d2e377-c2d6-4d05-bb3a-b542f6ae0f4c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.660481] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b91bb8-7c02-4ca7-86f3-e5d40bba71aa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.688863] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59 could not be found. [ 2187.689122] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2187.689318] env[61440]: INFO nova.compute.manager [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2187.689561] env[61440]: DEBUG oslo.service.loopingcall [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2187.689791] env[61440]: DEBUG nova.compute.manager [-] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2187.689914] env[61440]: DEBUG nova.network.neutron [-] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2187.715311] env[61440]: DEBUG nova.network.neutron [-] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.723381] env[61440]: INFO nova.compute.manager [-] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] Took 0.03 seconds to deallocate network for instance. [ 2187.819581] env[61440]: DEBUG oslo_concurrency.lockutils [None req-3b7a4d73-19be-41cd-a05a-0255c1cf8b8b tempest-ServerGroupTestJSON-436394410 tempest-ServerGroupTestJSON-436394410-project-member] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.820773] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 413.775s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.821145] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59] During sync_power_state the instance has a pending task (deleting). Skip. [ 2187.821389] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "e6d7a1a6-3c2e-4d23-bb2b-c89757adbf59" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.273841] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2195.274236] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2195.274236] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2195.293706] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Skipping network cache update for instance because it is Building. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.293900] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294009] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294147] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294276] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294400] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294522] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294641] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2195.294762] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2196.274510] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.274951] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.275024] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.275457] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2200.275269] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.274021] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.274807] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.275167] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.286632] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.286846] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.287017] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.287182] env[61440]: DEBUG nova.compute.resource_tracker [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2202.288275] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14978bde-69f0-4efe-af33-0ad0f90fca63 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.297298] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ae8fa8-8b49-4fbe-881f-0c938cf14c26 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.311023] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c044d1-265a-4973-98c0-0165bf8207be {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.317276] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f4544b-f9c2-41f3-8ac7-361ac71a1612 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.345729] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180671MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2202.345875] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.346082] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.411944] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f99f2c72-3158-46db-b21b-7f0066539252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412455] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412455] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412455] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412563] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412665] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412784] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.412900] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2b6d953f-c3e5-4671-9eac-61523a169b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2202.413096] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2202.413238] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=183GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2202.509329] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d09ae21-5952-4a0d-8809-40ac26b9601d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.516412] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e25c55-64e9-4e28-a793-ab88755bf087 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.545870] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7deeae1f-03ec-4ff0-b8e2-0b518f2ae148 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.552395] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7e1e98-de5b-4204-b8d2-829f929a06a1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.564581] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2202.574030] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2202.586617] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2202.586789] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.241s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2203.507962] env[61440]: DEBUG oslo_concurrency.lockutils [None 
req-dab0ee1f-e99e-4c2c-98f2-cf793b13af44 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "04558d4f-12c4-461a-93f9-64a32618f3d4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.581349] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2218.271117] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.052338] env[61440]: WARNING oslo_vmware.rw_handles [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2235.052338] env[61440]: ERROR oslo_vmware.rw_handles [ 2235.053275] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2235.054610] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2235.054868] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910
tempest-ServersNegativeTestJSON-1774089910-project-member] Copying Virtual Disk [datastore2] vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/44786be1-1520-41a5-8b2b-2eba8fc2f2ce/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2235.055182] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b964bf6e-11e3-485f-9463-674e1257f318 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.063560] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Waiting for the task: (returnval){ [ 2235.063560] env[61440]: value = "task-4281420" [ 2235.063560] env[61440]: _type = "Task" [ 2235.063560] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.070953] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Task: {'id': task-4281420, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.573975] env[61440]: DEBUG oslo_vmware.exceptions [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2235.574270] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2235.574853] env[61440]: ERROR nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.574853] env[61440]: Faults: ['InvalidArgument'] [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] Traceback (most recent call last): [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] yield resources [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self.driver.spawn(context, instance, image_meta, [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self._fetch_image_if_missing(context, vi) [ 2235.574853] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] image_cache(vi, tmp_image_ds_loc) [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] vm_util.copy_virtual_disk( [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] session._wait_for_task(vmdk_copy_task) [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] return self.wait_for_task(task_ref) [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] return evt.wait() [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] result = hub.switch() [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2235.575274] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] return self.greenlet.switch() [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self.f(*self.args, **self.kw) [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] raise exceptions.translate_fault(task_info.error) [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] Faults: ['InvalidArgument'] [ 2235.575667] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] [ 2235.575667] env[61440]: INFO nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Terminating instance [ 2235.576779] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2235.576993] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2235.577245] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58e45347-7dec-40bb-9b3a-e88fa263e2a1 {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.579321] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2235.579519] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2235.580254] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71007d5-4470-494a-9273-c5aac4ba5dc1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.587902] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2235.588157] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5c65b6c-0a27-499d-bdca-30a43ec0565e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.590178] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2235.590353] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2235.591298] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5e9a0e9-6163-485e-a142-e528d5f8cb43 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.595649] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for the task: (returnval){ [ 2235.595649] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a13cb9-aac4-b534-3aa6-d7e62adea62c" [ 2235.595649] env[61440]: _type = "Task" [ 2235.595649] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.607925] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52a13cb9-aac4-b534-3aa6-d7e62adea62c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.658163] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2235.658394] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2235.658576] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Deleting the datastore file [datastore2] f99f2c72-3158-46db-b21b-7f0066539252 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2235.658841] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d00f4a31-d62d-4ebc-92a0-638f00bb24f3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.665336] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Waiting for the task: (returnval){ [ 2235.665336] env[61440]: value = "task-4281422" [ 2235.665336] env[61440]: _type = "Task" [ 2235.665336] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.672537] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Task: {'id': task-4281422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.106250] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2236.106568] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Creating directory with path [datastore2] vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2236.106704] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc82d146-9acb-43b8-9e68-3230406fcbc2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.117455] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Created directory with path [datastore2] vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2236.117662] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Fetch image to [datastore2] vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2236.117841] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2236.118646] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30756b95-71f8-4e88-9510-0c6d6fd8451c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.124898] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3036ffda-264b-42de-90df-c0f5dc5e6772 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.133615] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebf0598-2eda-4802-b116-d0e1f574deb6 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.165778] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e630b5a-7d95-455a-b35a-38fb471eb0bf {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.178053] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3006db77-05ac-4d97-bf72-3d28b41c02f2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.178889] env[61440]: DEBUG oslo_vmware.api [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Task: {'id': task-4281422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063395} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2236.179150] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2236.179339] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2236.179516] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2236.179691] env[61440]: INFO nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Took 0.60 seconds to destroy the instance on the hypervisor. 
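
The failed spawn above follows oslo.vmware's poll-and-translate pattern: wait_for_task re-reads the CopyVirtualDisk_Task state until it reaches a terminal state, and _poll_task raises exceptions.translate_fault(task_info.error) on failure; the earlier "Fault InvalidArgument not matched." entry marks the fallback to the generic VimFaultException because no specific exception class is registered for that fault name. A minimal sketch of that pattern, not the oslo.vmware implementation; get_task_info and the .state/.error_name/.error_message attributes are assumed stand-ins for the PropertyCollector reads in the log:

    import time

    class VimFaultException(Exception):
        # Generic fallback carrying the raw fault names, as in the
        # "Faults: ['InvalidArgument']" lines above.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    _REGISTERED_FAULTS = {}  # fault name -> specific exception class, when one exists

    def translate_fault(name, message):
        # A missing entry is what gets logged as "Fault <name> not matched."
        # and yields the generic exception instead of a specific one.
        cls = _REGISTERED_FAULTS.get(name)
        if cls is None:
            return VimFaultException([name], message)
        return cls(message)

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll until the task leaves its running states; each poll is what
        # surfaces as a "... progress is N%" record above.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise translate_fault(info.error_name, info.error_message)
            time.sleep(interval)
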
[ 2236.181930] env[61440]: DEBUG nova.compute.claims [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2236.182133] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.182353] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.201702] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2236.254622] env[61440]: DEBUG oslo_vmware.rw_handles [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2236.316506] env[61440]: DEBUG oslo_vmware.rw_handles [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2236.316751] env[61440]: DEBUG oslo_vmware.rw_handles [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2236.385422] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d88bf70-0f24-4c45-883b-15d46579bd45 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.394324] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1aac06-ef93-4161-9d29-f1cb6f166690 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.422940] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbde87ad-293f-4049-ad73-b80b6f800475 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.429408] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aba3ef1-35c6-4100-889d-87f2249d68fa {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.441803] env[61440]: DEBUG nova.compute.provider_tree [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2236.451293] env[61440]: DEBUG nova.scheduler.client.report [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2236.465777] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.283s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.466317] env[61440]: ERROR nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.466317] env[61440]: Faults: ['InvalidArgument'] [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] Traceback (most recent call last): [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2236.466317] 
env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self.driver.spawn(context, instance, image_meta, [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self._fetch_image_if_missing(context, vi) [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] image_cache(vi, tmp_image_ds_loc) [ 2236.466317] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] vm_util.copy_virtual_disk( [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] session._wait_for_task(vmdk_copy_task) [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] return self.wait_for_task(task_ref) [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] return evt.wait() [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] result = hub.switch() [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] return self.greenlet.switch() [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2236.466639] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] self.f(*self.args, **self.kw) [ 2236.466933] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] File
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2236.466933] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] raise exceptions.translate_fault(task_info.error) [ 2236.466933] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.466933] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] Faults: ['InvalidArgument'] [ 2236.466933] env[61440]: ERROR nova.compute.manager [instance: f99f2c72-3158-46db-b21b-7f0066539252] [ 2236.467222] env[61440]: DEBUG nova.compute.utils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2236.468326] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Build of instance f99f2c72-3158-46db-b21b-7f0066539252 was re-scheduled: A specified parameter was not correct: fileType [ 2236.468326] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2236.468689] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2236.468861] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2236.469044] env[61440]: DEBUG nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2236.469214] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2236.839200] env[61440]: DEBUG nova.network.neutron [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.849932] env[61440]: INFO nova.compute.manager [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Took 0.38 seconds to deallocate network for instance. [ 2236.956778] env[61440]: INFO nova.scheduler.client.report [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Deleted allocations for instance f99f2c72-3158-46db-b21b-7f0066539252 [ 2236.976498] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a227fbba-ad84-4339-a724-97cfb25afd0b tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "f99f2c72-3158-46db-b21b-7f0066539252" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 687.419s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.976688] env[61440]: DEBUG oslo_concurrency.lockutils [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "f99f2c72-3158-46db-b21b-7f0066539252" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 491.443s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.976877] env[61440]: DEBUG oslo_concurrency.lockutils [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Acquiring lock "f99f2c72-3158-46db-b21b-7f0066539252-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.977121] env[61440]: DEBUG oslo_concurrency.lockutils [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "f99f2c72-3158-46db-b21b-7f0066539252-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.977298] env[61440]: DEBUG oslo_concurrency.lockutils [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "f99f2c72-3158-46db-b21b-7f0066539252-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.979504] env[61440]: INFO nova.compute.manager [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Terminating instance [ 2236.981766] env[61440]: DEBUG nova.compute.manager [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2236.981766] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2236.982104] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0bdf8ca-b1e5-463a-87a2-498e236746b9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.991485] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0fd3c5-943d-431e-bb9a-67834bd6903f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.018993] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f99f2c72-3158-46db-b21b-7f0066539252 could not be found. [ 2237.019219] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2237.019407] env[61440]: INFO nova.compute.manager [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2237.019646] env[61440]: DEBUG oslo.service.loopingcall [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2237.020140] env[61440]: DEBUG nova.compute.manager [-] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2237.020238] env[61440]: DEBUG nova.network.neutron [-] [instance: f99f2c72-3158-46db-b21b-7f0066539252] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2237.042918] env[61440]: DEBUG nova.network.neutron [-] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2237.050661] env[61440]: INFO nova.compute.manager [-] [instance: f99f2c72-3158-46db-b21b-7f0066539252] Took 0.03 seconds to deallocate network for instance. [ 2237.147028] env[61440]: DEBUG oslo_concurrency.lockutils [None req-12701425-41bf-4d10-9253-f0cb4f78c11e tempest-ServersNegativeTestJSON-1774089910 tempest-ServersNegativeTestJSON-1774089910-project-member] Lock "f99f2c72-3158-46db-b21b-7f0066539252" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.170s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.147949] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "f99f2c72-3158-46db-b21b-7f0066539252" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 463.102s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.148178] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f99f2c72-3158-46db-b21b-7f0066539252] During sync_power_state the instance has a pending task (deleting). Skip. 
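
The oslo.service.loopingcall entry above waits on a nested retry helper around network deallocation. A hedged sketch of that shape (the function name comes from the log; the fixed attempt budget and catch-all retry policy here are assumptions, not the Nova implementation):

    def deallocate_network_with_retries(deallocate, attempts=3):
        # Retry the deallocation, re-raising only once the budget is spent;
        # the looping call machinery above blocks until this returns.
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception:
                if attempt == attempts:
                    raise
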
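The Acquiring/acquired/"released" triplets above, with their ":: waited Ns" / ":: held Ns" timings, are emitted by oslo.concurrency's lock decorator around a critical section; the 491.443s wait on the terminate side corresponds to the failed build holding the same per-instance lock for 687.419s. A minimal usage sketch (the lock name is taken from the log; the body is a placeholder, not Nova code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('f99f2c72-3158-46db-b21b-7f0066539252')
    def do_terminate_instance():
        # Entering logs "Acquiring" and "acquired :: waited Ns"; returning
        # logs ""released" :: held Ns", matching the records above.
        pass
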
[ 2237.148357] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "f99f2c72-3158-46db-b21b-7f0066539252" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.275110] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2256.275486] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2256.275486] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2256.295398] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.296941] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.296941] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.296941] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.296941] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.296941] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.297261] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2256.297261] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2258.274947] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.275283] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2259.275030] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2259.275030] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2260.276022] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2261.274614] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.275085] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.275490] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.287488] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.287703] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.287870] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.288041] env[61440]: DEBUG nova.compute.resource_tracker [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2264.289135] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ebeb60-53c9-4d72-83c2-d312b1e65493 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.297767] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5749cf45-d851-4833-9856-ba59432df21b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.310903] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095db4aa-77fa-499a-a77b-f2e97d8571d7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.316712] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc31af9f-9e02-4c00-ab6f-96c46e3b8a7f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.345451] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180665MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2264.345612] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.345776] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.413832] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414013] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414156] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414286] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414407] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414527] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414645] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2b6d953f-c3e5-4671-9eac-61523a169b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2264.414827] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2264.414965] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=183GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2264.508636] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a357d1d-db09-4d85-ab4e-b98a34e7994b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.516419] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ba0e91-e892-4326-afe7-22fe5d3a32c8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.544939] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633aba09-da7c-4d82-be3e-4f53cd5cfebd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.551559] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b5933e-9d74-45bf-83d4-4e796f54de71 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.564767] env[61440]: DEBUG nova.compute.provider_tree 
[None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2264.574554] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2264.587986] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2264.588192] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.242s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.583974] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2284.856336] env[61440]: WARNING oslo_vmware.rw_handles [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2284.856336] env[61440]: ERROR oslo_vmware.rw_handles [ 2284.857087] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] 
[instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2284.858612] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2284.858858] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Copying Virtual Disk [datastore2] vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/c65ca160-fa35-45ed-9254-d6e4f854113a/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2284.859153] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bf1971a-6096-4ca7-9c76-14a2a0911c7d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.866722] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for the task: (returnval){ [ 2284.866722] env[61440]: value = "task-4281423" [ 2284.866722] env[61440]: _type = "Task" [ 2284.866722] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.874797] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': task-4281423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.377161] env[61440]: DEBUG oslo_vmware.exceptions [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Fault InvalidArgument not matched. 
{{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2285.377469] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.378096] env[61440]: ERROR nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2285.378096] env[61440]: Faults: ['InvalidArgument'] [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Traceback (most recent call last): [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] yield resources [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self.driver.spawn(context, instance, image_meta, [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self._fetch_image_if_missing(context, vi) [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2285.378096] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] image_cache(vi, tmp_image_ds_loc) [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] vm_util.copy_virtual_disk( [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] session._wait_for_task(vmdk_copy_task) [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] return self.wait_for_task(task_ref) [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] return evt.wait() [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] result = hub.switch() [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] return self.greenlet.switch() [ 2285.378652] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2285.379066] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self.f(*self.args, **self.kw) [ 2285.379066] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2285.379066] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] raise exceptions.translate_fault(task_info.error) [ 2285.379066] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2285.379066] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Faults: ['InvalidArgument'] [ 2285.379066] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] [ 2285.379066] env[61440]: INFO nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Terminating instance [ 2285.380017] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2285.380257] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2285.380503] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-234f4ebe-3177-46bf-bf43-9835901192e4 {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.382768] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2285.383236] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2285.383708] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476402bd-cd7f-4bf3-a274-47ca414aeb4e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.390875] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2285.391104] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e40ee278-b773-403a-b6e0-d29a513b98cf {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.393264] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2285.393459] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2285.394392] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba9aae28-62c7-4eba-a981-644f752e6c4b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.398949] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 2285.398949] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]522dd920-721a-adbc-8df1-3a0f38dabbef" [ 2285.398949] env[61440]: _type = "Task" [ 2285.398949] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.406374] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]522dd920-721a-adbc-8df1-3a0f38dabbef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.455368] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2285.455614] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2285.455731] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Deleting the datastore file [datastore2] f5547fbd-9d74-4217-bba3-3747b3f3c9b2 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2285.455996] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0bfbfce-b0ea-4c99-95cb-4720f68f9f1a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.463022] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for the task: (returnval){ [ 2285.463022] env[61440]: value = "task-4281425" [ 2285.463022] env[61440]: _type = "Task" [ 2285.463022] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.470851] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': task-4281425, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.909912] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2285.911573] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating directory with path [datastore2] vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2285.911573] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e6279c7-e589-4399-b2c1-706975a57ff1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.922790] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Created directory with path [datastore2] vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2285.923238] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Fetch image to [datastore2] vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2285.923578] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2285.924451] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117e06ac-3672-4730-a7ee-9e1447f7e05a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.931201] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f98dac-bd03-43f7-8235-073a79d9eb1f {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.939982] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a12bc19-c1b7-4036-863a-2903d0e54227 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.972720] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ae9dabdb-f3a4-43cb-b7f0-0d155a84b5f2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.980022] env[61440]: DEBUG oslo_vmware.api [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': task-4281425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065487} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.980974] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2285.981348] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2285.981501] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2285.981864] env[61440]: INFO nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Took 0.60 seconds to destroy the instance on the hypervisor. 
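The resource-audit records above reduce to simple bookkeeping: each tracked instance claims {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}} in placement, and the MEMORY_MB inventory carries a 512 MB reservation, which is how the tracker reports used_ram=1408MB/used_vcpus=7 while seven instances are managed (record 2264.414965) and used_ram=1280MB/used_vcpus=6 once f5547fbd-9d74-4217-bba3-3747b3f3c9b2 is gone (record 2324.492261). A minimal sketch of that arithmetic, illustrative only and not the ResourceTracker implementation:

    # Sketch of the arithmetic behind the "Final resource view" records;
    # the allocation dicts mirror the placement entries logged above.
    RESERVED_MEMORY_MB = 512  # matches 'reserved': 512 in the MEMORY_MB inventory

    def final_resource_view(allocations):
        used = {"MEMORY_MB": RESERVED_MEMORY_MB, "DISK_GB": 0, "VCPU": 0}
        for alloc in allocations:
            for rc, amount in alloc["resources"].items():
                used[rc] += amount
        return used

    # Seven instances, as in the audit at 2264.414965: 512 + 7 * 128 = 1408 MB.
    seven = [{"resources": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}}] * 7
    assert final_resource_view(seven) == {"MEMORY_MB": 1408, "DISK_GB": 7, "VCPU": 7}

    # Six instances, as in the audit at 2324.492261: 512 + 6 * 128 = 1280 MB.
    assert final_resource_view(seven[:6]) == {"MEMORY_MB": 1280, "DISK_GB": 6, "VCPU": 6}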
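The RemoteDisconnected traceback at 2284.856336 (and again at 2333.712026 below) comes from the write-handle close path: after the image bytes have been streamed to the datastore URL, close() reads the upload response, and the ESX endpoint sometimes drops the socket without answering. A hedged sketch of that tolerate-on-close pattern, assuming a plain http.client connection rather than the actual oslo_vmware.rw_handles internals:

    import http.client
    import logging

    LOG = logging.getLogger(__name__)

    def close_write_handle(conn: http.client.HTTPConnection) -> None:
        # The image bytes have already been written; all that remains is to
        # read the server's response to the upload request.
        try:
            conn.getresponse()  # raises RemoteDisconnected in the traceback above
        except http.client.RemoteDisconnected:
            # The transfer itself completed (the next record logs "Downloaded
            # image file data ..."), so the missing response is logged as a
            # WARNING rather than propagated.
            LOG.warning("Error occurred while reading the HTTP response.",
                        exc_info=True)
        finally:
            conn.close()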
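The CopyVirtualDisk_Task records above show the generic vCenter task loop: submit the task, poll it (the repeated "progress is 0%" lines from _poll_task), and translate a failed task's fault into a Python exception, here VimFaultException("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) raised back to vm_util.copy_virtual_disk. A minimal stand-alone sketch of that loop; get_task_info is a hypothetical stand-in for the PropertyCollector round-trips oslo.vmware performs:

    import time

    class TaskFailed(Exception):
        """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Poll until the vCenter task leaves its queued/running states,
        # mirroring the repeated "_poll_task ... progress is 0%" records.
        while True:
            info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 0}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # This is where the fault ("A specified parameter was not
                # correct: fileType" / Faults: ['InvalidArgument']) surfaces
                # as an exception for the caller.
                raise TaskFailed(info.get("error"))
            time.sleep(poll_interval)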
[ 2285.983869] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4850903f-65bf-43de-b67b-2e848e90db6e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.985357] env[61440]: DEBUG nova.compute.claims [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2285.985532] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2285.985743] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.007900] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2286.067228] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2286.125640] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2286.125830] env[61440]: DEBUG oslo_vmware.rw_handles [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2286.176917] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d342d288-f19e-4c37-8dd3-5a1996f78882 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.184459] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd72b72b-62df-4710-a877-582dade72cf3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.212820] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0869b0-5c41-4998-ac0d-e9b135818e3a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.219721] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8d52e0-67e7-424e-878b-248e26210bcd {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.233103] env[61440]: DEBUG nova.compute.provider_tree [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2286.243436] env[61440]: DEBUG nova.scheduler.client.report [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2286.258443] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.273s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.258958] env[61440]: ERROR nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2286.258958] env[61440]: Faults: ['InvalidArgument'] [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Traceback (most recent call last): [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: 
f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self.driver.spawn(context, instance, image_meta, [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self._fetch_image_if_missing(context, vi) [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] image_cache(vi, tmp_image_ds_loc) [ 2286.258958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] vm_util.copy_virtual_disk( [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] session._wait_for_task(vmdk_copy_task) [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] return self.wait_for_task(task_ref) [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] return evt.wait() [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] result = hub.switch() [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] return self.greenlet.switch() [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2286.259471] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] self.f(*self.args, **self.kw) [ 2286.259958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2286.259958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] raise exceptions.translate_fault(task_info.error) [ 2286.259958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2286.259958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Faults: ['InvalidArgument'] [ 2286.259958] env[61440]: ERROR nova.compute.manager [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] [ 2286.259958] env[61440]: DEBUG nova.compute.utils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2286.261088] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Build of instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 was re-scheduled: A specified parameter was not correct: fileType [ 2286.261088] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2286.261473] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2286.261649] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2286.261820] env[61440]: DEBUG nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2286.261981] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2286.583262] env[61440]: DEBUG nova.network.neutron [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.595292] env[61440]: INFO nova.compute.manager [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Took 0.33 seconds to deallocate network for instance. [ 2286.691877] env[61440]: INFO nova.scheduler.client.report [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Deleted allocations for instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 [ 2286.713151] env[61440]: DEBUG oslo_concurrency.lockutils [None req-c82fd4ab-1b1f-4027-a3c7-c06f56a4c335 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 673.248s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.713468] env[61440]: DEBUG oslo_concurrency.lockutils [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 476.637s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.713698] env[61440]: DEBUG oslo_concurrency.lockutils [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.713929] env[61440]: DEBUG oslo_concurrency.lockutils [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.714127] env[61440]:
DEBUG oslo_concurrency.lockutils [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.716399] env[61440]: INFO nova.compute.manager [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Terminating instance [ 2286.718444] env[61440]: DEBUG nova.compute.manager [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2286.718444] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2286.718984] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59d76047-94a6-4a34-bcd2-7bbaf9bdf78b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.728317] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19129460-ff9f-4f78-af2e-b57517b31b62 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.757078] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5547fbd-9d74-4217-bba3-3747b3f3c9b2 could not be found. [ 2286.757293] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2286.757477] env[61440]: INFO nova.compute.manager [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2286.757730] env[61440]: DEBUG oslo.service.loopingcall [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2286.757943] env[61440]: DEBUG nova.compute.manager [-] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2286.758068] env[61440]: DEBUG nova.network.neutron [-] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2286.783880] env[61440]: DEBUG nova.network.neutron [-] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.792099] env[61440]: INFO nova.compute.manager [-] [instance: f5547fbd-9d74-4217-bba3-3747b3f3c9b2] Took 0.03 seconds to deallocate network for instance. [ 2286.907562] env[61440]: DEBUG oslo_concurrency.lockutils [None req-816e590d-b7ab-4bd6-bd6d-9d9a38552887 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "f5547fbd-9d74-4217-bba3-3747b3f3c9b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.194s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.276059] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2318.276059] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2318.276059] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2318.291694] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2318.291872] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2318.291978] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2318.292123] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building.
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2318.292251] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2318.292375] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2318.292499] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2318.292977] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.276033] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.276033] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances with incomplete migration {{(pid=61440) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2320.286853] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.287279] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.287279] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2322.274924] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2322.275211] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.276030] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.287778] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2324.288040] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2324.288180] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2324.288342] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2324.289460] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2971f2a7-7c8c-4e3b-8d30-8eefb79baa62 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.298134] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce648560-26e2-42d3-8849-7419a3b87bc3 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.311747] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174a4161-c340-49f0-9e61-667b679f3f8a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.317889] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ac277d-91e3-428c-a5fb-37d5f3b7317e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.345995] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180629MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2324.346162] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2324.346352] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2324.491188] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2324.491400] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2324.491521] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2324.491656] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2324.491781] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2324.491901] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2b6d953f-c3e5-4671-9eac-61523a169b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2324.492114] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2324.492261] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=183GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2324.507580] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing inventories for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2324.520177] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating ProviderTree inventory for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2324.520357] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Updating inventory in ProviderTree for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2324.530856] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing aggregate associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, aggregates: None {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2324.547421] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Refreshing trait associations for resource provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61440) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2324.620528] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfcf965-4998-4f62-9e8d-e1165375e9a7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.628404] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eafdc7ea-7c60-4b98-9f42-baa5b1b95c7e {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.657272] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d461a8eb-e9ce-452d-a85d-8d842c22e9ec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.664341] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb4b60d-c009-49c9-9339-e6a8da5097e9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.677009] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2324.686219] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2324.699962] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2324.700180] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.354s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2325.694306] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.274224] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.274622] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.712026] env[61440]: WARNING oslo_vmware.rw_handles [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection 
without response [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2333.712026] env[61440]: ERROR oslo_vmware.rw_handles [ 2333.712026] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2333.712966] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2333.713407] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Copying Virtual Disk [datastore2] vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/ced72935-f164-493b-92e8-dac5316c5b2e/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2333.713667] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bdbadce-0cb5-439e-883b-5ccd151f2e33 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.722654] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 2333.722654] env[61440]: value = "task-4281426" [ 2333.722654] env[61440]: _type = "Task" [ 2333.722654] env[61440]: } to complete. 
{{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.730785] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': task-4281426, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.233845] env[61440]: DEBUG oslo_vmware.exceptions [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2334.234762] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2334.234999] env[61440]: ERROR nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2334.234999] env[61440]: Faults: ['InvalidArgument'] [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Traceback (most recent call last): [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] yield resources [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self.driver.spawn(context, instance, image_meta, [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self._fetch_image_if_missing(context, vi) [ 2334.234999] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] image_cache(vi, tmp_image_ds_loc) [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: 
debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] vm_util.copy_virtual_disk( [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] session._wait_for_task(vmdk_copy_task) [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] return self.wait_for_task(task_ref) [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] return evt.wait() [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] result = hub.switch() [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2334.235431] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] return self.greenlet.switch() [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self.f(*self.args, **self.kw) [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] raise exceptions.translate_fault(task_info.error) [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Faults: ['InvalidArgument'] [ 2334.235780] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] [ 2334.235953] env[61440]: INFO nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Terminating instance [ 2334.238031] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2334.238031] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2334.238031] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f30d5657-edc4-4896-814f-c9cb325dc186 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.240395] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2334.240753] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2334.241598] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e12f276-e7cb-47a2-acd1-6a855fffba36 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.248237] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2334.248615] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da9bcd38-bbb2-412b-901c-452f0531fba9 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.251062] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2334.251149] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2334.252069] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f851cad-3f51-4e57-ac04-28d9b9265a69 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.259055] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 2334.259055] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52831a18-6db9-c108-be5c-d3f08ae85c7a" [ 2334.259055] env[61440]: _type = "Task" [ 2334.259055] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.265112] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52831a18-6db9-c108-be5c-d3f08ae85c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.281787] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2334.282191] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Cleaning up deleted instances {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2334.293805] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] There are 0 instances to clean {{(pid=61440) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2334.322767] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2334.322931] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2334.323067] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Deleting the datastore file [datastore2] debbffae-2f2a-4d8e-9630-b3fd8bb932e4 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2334.323782] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f69c4be-eb5f-4491-9161-0f608150a3e2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.330692] env[61440]: DEBUG oslo_vmware.api [None 
req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for the task: (returnval){ [ 2334.330692] env[61440]: value = "task-4281428" [ 2334.330692] env[61440]: _type = "Task" [ 2334.330692] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.340146] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': task-4281428, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.768536] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2334.768841] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating directory with path [datastore2] vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2334.769032] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e63036b7-e2a1-4e35-a834-95a8a97a0c8b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.779995] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Created directory with path [datastore2] vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2334.780325] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Fetch image to [datastore2] vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2334.780380] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2334.783685] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3043bc-2cdd-451b-8917-430876a14233 {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.788049] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e3b241-b097-4d00-8262-f5958a25ba0a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.797077] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f201d9-54d6-40e3-b839-b3f17e2fe3d2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.839892] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfdc4d9-bfb4-4524-b116-94762664b483 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.847213] env[61440]: DEBUG oslo_vmware.api [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Task: {'id': task-4281428, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067963} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.848885] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2334.849212] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2334.849477] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2334.849783] env[61440]: INFO nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Took 0.61 seconds to destroy the instance on the hypervisor. 
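
[editor's note] The CopyVirtualDisk_Task sequence above follows oslo.vmware's polling contract: wait_for_task blocks on an event while _poll_task repeatedly reads the task state (the "progress is 0%" lines), and a task that ends in error is converted via exceptions.translate_fault — the "Fault InvalidArgument not matched" entry means no specific fault subclass matched, so the generic VimFaultException carrying "A specified parameter was not correct: fileType" is raised. A minimal, stdlib-only sketch of that control flow follows; TaskInfo and the fetch_task_info callable are hypothetical stand-ins for the real vSphere PropertyCollector reads, and only the loop structure mirrors what the log shows.

```python
# Sketch of the wait_for_task/_poll_task pattern logged above
# (oslo_vmware/api.py:397/434). All names here are illustrative.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str         # 'queued' | 'running' | 'success' | 'error'
    progress: int      # percent complete, as logged ("progress is 0%")
    error: str | None  # fault text on failure


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(fetch_task_info, task_id, interval=0.5):
    """Poll until the task leaves 'queued'/'running', then return or raise."""
    while True:
        info = fetch_task_info(task_id)
        if info.state == 'success':
            return info
        if info.state == 'error':
            # _poll_task raises exceptions.translate_fault(task_info.error);
            # when no specific fault class matches ("Fault InvalidArgument
            # not matched"), the generic exception is raised instead.
            raise VimFaultException(info.error)
        time.sleep(interval)


# Example: a task that fails the way task-4281426 did.
failed = TaskInfo(state='error', progress=0,
                  error="A specified parameter was not correct: fileType")
try:
    wait_for_task(lambda _id: failed, 'task-4281426')
except VimFaultException as exc:
    print(exc)  # -> A specified parameter was not correct: fileType
```

Under these assumptions, each repeated "Task: {...} progress is 0%" line corresponds to one iteration of this loop that observed a still-running task.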
[ 2334.852094] env[61440]: DEBUG nova.compute.claims [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2334.852280] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2334.852543] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.859034] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-446bdfac-652b-4815-8627-632cca2206ec {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.879368] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2334.951021] env[61440]: DEBUG oslo_vmware.rw_handles [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2335.012445] env[61440]: DEBUG oslo_vmware.rw_handles [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2335.012704] env[61440]: DEBUG oslo_vmware.rw_handles [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2335.069141] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d9a6af-357b-4798-a443-56e463fe57ba {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.076838] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecabeb2-7cf9-410d-84f4-ab38fdff6a30 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.106189] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a977835-6e6b-4b84-8163-613cd2fdc00d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.112974] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbf7f21-1ad0-45c5-b6e8-01728a1b720a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.125866] env[61440]: DEBUG nova.compute.provider_tree [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2335.135422] env[61440]: DEBUG nova.scheduler.client.report [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2335.155482] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.156090] env[61440]: ERROR nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.156090] env[61440]: Faults: ['InvalidArgument'] [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Traceback (most recent call last): [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2335.156090] 
env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self.driver.spawn(context, instance, image_meta, [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self._fetch_image_if_missing(context, vi) [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] image_cache(vi, tmp_image_ds_loc) [ 2335.156090] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] vm_util.copy_virtual_disk( [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] session._wait_for_task(vmdk_copy_task) [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] return self.wait_for_task(task_ref) [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] return evt.wait() [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] result = hub.switch() [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] return self.greenlet.switch() [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2335.156634] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] self.f(*self.args, **self.kw) [ 2335.157136] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2335.157136] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] raise exceptions.translate_fault(task_info.error) [ 2335.157136] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.157136] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Faults: ['InvalidArgument'] [ 2335.157136] env[61440]: ERROR nova.compute.manager [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] [ 2335.157136] env[61440]: DEBUG nova.compute.utils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2335.158353] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Build of instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 was re-scheduled: A specified parameter was not correct: fileType [ 2335.158353] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2335.158746] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2335.158979] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2335.159227] env[61440]: DEBUG nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2335.159411] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2335.582433] env[61440]: DEBUG nova.network.neutron [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.597998] env[61440]: INFO nova.compute.manager [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Took 0.44 seconds to deallocate network for instance. [ 2335.714939] env[61440]: INFO nova.scheduler.client.report [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Deleted allocations for instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 [ 2335.740935] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a9c5c184-d8c8-44dd-8fb4-38cecdbab5be tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 586.296s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.741237] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 390.489s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.741496] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2335.741749] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.741930] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.744077] env[61440]: INFO nova.compute.manager [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Terminating instance [ 2335.747661] env[61440]: DEBUG nova.compute.manager [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2335.749973] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2335.749973] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82b0b42f-b486-4821-9ff2-e8130cd72874 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.757178] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a0976d-bd3f-4a44-b484-7de808a00888 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.783053] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance debbffae-2f2a-4d8e-9630-b3fd8bb932e4 could not be found. [ 2335.783327] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2335.783431] env[61440]: INFO nova.compute.manager [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2335.783675] env[61440]: DEBUG oslo.service.loopingcall [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2335.784157] env[61440]: DEBUG nova.compute.manager [-] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2335.784253] env[61440]: DEBUG nova.network.neutron [-] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2335.807849] env[61440]: DEBUG nova.network.neutron [-] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.817090] env[61440]: INFO nova.compute.manager [-] [instance: debbffae-2f2a-4d8e-9630-b3fd8bb932e4] Took 0.03 seconds to deallocate network for instance. [ 2335.931686] env[61440]: DEBUG oslo_concurrency.lockutils [None req-a414b55b-011d-4cfe-8710-6af9683c4fdd tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Lock "debbffae-2f2a-4d8e-9630-b3fd8bb932e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.117369] env[61440]: DEBUG oslo_concurrency.lockutils [None req-0ea5ff9b-efe3-4b62-b369-487b1accb362 tempest-AttachVolumeNegativeTest-158246958 tempest-AttachVolumeNegativeTest-158246958-project-member] Acquiring lock "2b6d953f-c3e5-4671-9eac-61523a169b99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2341.282206] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.618258] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.618604] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Getting list of instances from cluster (obj){ [ 2345.618604] env[61440]: value = "domain-c8" [ 2345.618604] env[61440]: _type = "ClusterComputeResource" [ 2345.618604] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2345.619631] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b42998-506e-47f3-b775-fadbf7892a62 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.632783] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None 
req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Got total of 5 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2377.591260] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._sync_power_states {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2377.606377] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Getting list of instances from cluster (obj){ [ 2377.606377] env[61440]: value = "domain-c8" [ 2377.606377] env[61440]: _type = "ClusterComputeResource" [ 2377.606377] env[61440]: } {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2377.608062] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b17dbb-8517-4c92-bf7e-756793567245 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.621113] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Got total of 5 instances {{(pid=61440) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2377.621393] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid abbd5d5b-7821-435d-ac56-0d070ff08043 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2377.621624] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 976050d9-fd71-48db-9fb5-1b244f2ae4c4 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2377.621795] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 430f38e6-068a-4c50-b27a-24335bf7e3ec {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2377.622020] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 04558d4f-12c4-461a-93f9-64a32618f3d4 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2377.622200] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Triggering sync for uuid 2b6d953f-c3e5-4671-9eac-61523a169b99 {{(pid=61440) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2377.622508] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "abbd5d5b-7821-435d-ac56-0d070ff08043" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.622774] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "976050d9-fd71-48db-9fb5-1b244f2ae4c4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.623024] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "430f38e6-068a-4c50-b27a-24335bf7e3ec" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.623248] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "04558d4f-12c4-461a-93f9-64a32618f3d4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.623448] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "2b6d953f-c3e5-4671-9eac-61523a169b99" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.306592] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2379.306868] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Starting heal instance info cache {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2379.306904] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Rebuilding the list of instances to heal {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2379.323486] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2379.323666] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2379.323778] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 430f38e6-068a-4c50-b27a-24335bf7e3ec] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2379.323910] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 04558d4f-12c4-461a-93f9-64a32618f3d4] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2379.324047] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: 2b6d953f-c3e5-4671-9eac-61523a169b99] Skipping network cache update for instance because it is Building. {{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2379.324177] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Didn't find any instances for network info cache update. 
{{(pid=61440) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2379.324644] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.274184] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.274373] env[61440]: DEBUG nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61440) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2382.274695] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.275172] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2383.274796] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.892517] env[61440]: WARNING oslo_vmware.rw_handles [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles response.begin() [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2384.892517] env[61440]: ERROR oslo_vmware.rw_handles [ 2384.892517] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] 
[instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Downloaded image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2384.894183] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Caching image {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2384.894433] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Copying Virtual Disk [datastore2] vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk to [datastore2] vmware_temp/7c3fbefd-5ace-4edb-afec-ccf484677a8d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk {{(pid=61440) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2384.894722] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03190906-8685-4106-bf56-ff754ae199a2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.905705] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 2384.905705] env[61440]: value = "task-4281429" [ 2384.905705] env[61440]: _type = "Task" [ 2384.905705] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2384.916408] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281429, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.269738] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2385.274408] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager.update_available_resource {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2385.286348] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2385.286563] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2385.286749] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.286937] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61440) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2385.288080] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3ffd33-5164-40f4-9978-114a474d2b5c {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.296544] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee1d898-8ba2-4918-b05b-3a821dbab808 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.310873] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31663340-8f58-481c-b332-eb3413acd8b2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.316885] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0622ba-2427-4bc7-ad4e-e105d7619662 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.345173] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180649MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61440) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2385.345338] env[61440]: 
DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2385.345519] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2385.406347] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance abbd5d5b-7821-435d-ac56-0d070ff08043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2385.406505] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 976050d9-fd71-48db-9fb5-1b244f2ae4c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2385.406633] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 430f38e6-068a-4c50-b27a-24335bf7e3ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2385.406755] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 04558d4f-12c4-461a-93f9-64a32618f3d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2385.406882] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Instance 2b6d953f-c3e5-4671-9eac-61523a169b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61440) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2385.407077] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2385.407222] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=183GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61440) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2385.418846] env[61440]: DEBUG oslo_vmware.exceptions [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Fault InvalidArgument not matched. {{(pid=61440) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2385.419251] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2385.419934] env[61440]: ERROR nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2385.419934] env[61440]: Faults: ['InvalidArgument'] [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Traceback (most recent call last): [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] yield resources [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self.driver.spawn(context, instance, image_meta, [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self._fetch_image_if_missing(context, vi) [ 2385.419934] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] image_cache(vi, tmp_image_ds_loc) [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] vm_util.copy_virtual_disk( [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] session._wait_for_task(vmdk_copy_task) [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] return self.wait_for_task(task_ref) [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] return evt.wait() [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] result = hub.switch() [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2385.420280] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] return self.greenlet.switch() [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self.f(*self.args, **self.kw) [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] raise exceptions.translate_fault(task_info.error) [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Faults: ['InvalidArgument'] [ 2385.420607] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] [ 2385.420607] env[61440]: INFO nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: 
abbd5d5b-7821-435d-ac56-0d070ff08043] Terminating instance [ 2385.421771] env[61440]: DEBUG oslo_concurrency.lockutils [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2385.422011] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2385.423713] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Start destroying the instance on the hypervisor. {{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2385.424163] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2385.424650] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31ff9053-77eb-4164-87f6-fb6b7d1a0bde {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.427289] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a5cb0c-49cc-4a43-8a52-2ccd57ebac15 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.436114] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Unregistering the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2385.436335] env[61440]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a493903f-008c-4067-b972-2bf7ed1cf60a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.438421] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2385.438594] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61440) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2385.439516] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09574e4a-e3ca-4ebf-809e-af4a8149b0d4 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.443973] env[61440]: DEBUG oslo_vmware.api [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Waiting for the task: (returnval){ [ 2385.443973] env[61440]: value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524bf1b4-1ab2-3692-26c3-af95c9393a2d" [ 2385.443973] env[61440]: _type = "Task" [ 2385.443973] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.454298] env[61440]: DEBUG oslo_vmware.api [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]524bf1b4-1ab2-3692-26c3-af95c9393a2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.492283] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad29193b-2bd6-4190-b9ff-58fef65efab2 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.499412] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d90362-9de3-4dbe-be93-87127daee047 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.506448] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Unregistered the VM {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2385.506739] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Deleting contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2385.506992] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleting the datastore file [datastore2] abbd5d5b-7821-435d-ac56-0d070ff08043 {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2385.507767] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eca7abb-a073-41f7-8812-90154a770206 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.534150] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4688e17d-f4cc-441b-9125-202eb263eb72 {{(pid=61440) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.538097] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for the task: (returnval){ [ 2385.538097] env[61440]: value = "task-4281431" [ 2385.538097] env[61440]: _type = "Task" [ 2385.538097] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.543385] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfe42c8-edd3-4e66-940f-9a27cf4c9bb8 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.550901] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281431, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.558972] env[61440]: DEBUG nova.compute.provider_tree [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2385.569037] env[61440]: DEBUG nova.scheduler.client.report [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2385.586268] env[61440]: DEBUG nova.compute.resource_tracker [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61440) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2385.586458] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.241s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.956742] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Preparing fetch location {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2385.957130] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Creating directory with path [datastore2] vmware_temp/b14541c5-1849-4d5c-af4c-59a96fff327d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2385.957278] env[61440]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-daf0035e-90da-49a9-b5d9-eedfb65ba8e1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.968860] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Created directory with path [datastore2] vmware_temp/b14541c5-1849-4d5c-af4c-59a96fff327d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2385.969065] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Fetch image to [datastore2] vmware_temp/b14541c5-1849-4d5c-af4c-59a96fff327d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2385.969245] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to [datastore2] vmware_temp/b14541c5-1849-4d5c-af4c-59a96fff327d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk on the data store datastore2 {{(pid=61440) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2385.969970] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8a30fe-7d02-4df1-8e7e-38f7bd278d05 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.976904] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba75079c-f5f7-4bdf-a638-409d57416b10 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.985871] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f06f987-8295-4db4-9bab-ece24bd4fd79 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.036020] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8598ca4-c2f7-41ef-a4d3-5f46aaa65dd0 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.043997] env[61440]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f9f6d780-97d5-4dd0-a1ee-24aef47ae2b1 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.051116] env[61440]: DEBUG oslo_vmware.api [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Task: {'id': task-4281431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077592} completed successfully. 
{{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.051496] env[61440]: DEBUG nova.virt.vmwareapi.ds_util [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleted the datastore file {{(pid=61440) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2386.054019] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Deleted contents of the VM from datastore datastore2 {{(pid=61440) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2386.054019] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2386.054019] env[61440]: INFO nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Took 0.63 seconds to destroy the instance on the hypervisor. [ 2386.054773] env[61440]: DEBUG nova.compute.claims [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Aborting claim: {{(pid=61440) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2386.054974] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.055269] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2386.067481] env[61440]: DEBUG nova.virt.vmwareapi.images [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] [instance: 976050d9-fd71-48db-9fb5-1b244f2ae4c4] Downloading image file data 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 to the data store datastore2 {{(pid=61440) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2386.211797] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa526950-b61b-4b54-94ae-e0ccae34a8f7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.215909] env[61440]: DEBUG oslo_vmware.rw_handles [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 
tempest-DeleteServersTestJSON-1288921333-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b14541c5-1849-4d5c-af4c-59a96fff327d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2386.221183] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4b121c-e240-4652-9698-f1d4bf9c1c93 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.303870] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0886b5d2-6619-4fb7-ab47-5ff4a0035f19 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.306618] env[61440]: DEBUG oslo_vmware.rw_handles [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Completed reading data from the image iterator. {{(pid=61440) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2386.306791] env[61440]: DEBUG oslo_vmware.rw_handles [None req-7d447ffd-742b-48cf-8923-84bc4641c16c tempest-DeleteServersTestJSON-1288921333 tempest-DeleteServersTestJSON-1288921333-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b14541c5-1849-4d5c-af4c-59a96fff327d/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61440) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2386.311960] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6910f9-f5ee-4120-8e72-fcb10fed21c7 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.324838] env[61440]: DEBUG nova.compute.provider_tree [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2386.335674] env[61440]: DEBUG nova.scheduler.client.report [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2386.348746] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2386.349299] env[61440]: ERROR nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2386.349299] env[61440]: Faults: ['InvalidArgument'] [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Traceback (most recent call last): [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self.driver.spawn(context, instance, image_meta, [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self._fetch_image_if_missing(context, vi) [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] image_cache(vi, tmp_image_ds_loc) [ 2386.349299] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] vm_util.copy_virtual_disk( [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] session._wait_for_task(vmdk_copy_task) [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] return self.wait_for_task(task_ref) [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] return evt.wait() [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] result = hub.switch() [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] return self.greenlet.switch() [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2386.349833] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] self.f(*self.args, **self.kw) [ 2386.350696] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2386.350696] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] raise exceptions.translate_fault(task_info.error) [ 2386.350696] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2386.350696] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Faults: ['InvalidArgument'] [ 2386.350696] env[61440]: ERROR nova.compute.manager [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] [ 2386.350696] env[61440]: DEBUG nova.compute.utils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] VimFaultException {{(pid=61440) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2386.351509] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Build of instance abbd5d5b-7821-435d-ac56-0d070ff08043 was re-scheduled: A specified parameter was not correct: fileType [ 2386.351509] env[61440]: Faults: ['InvalidArgument'] {{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2386.351928] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Unplugging VIFs for instance {{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2386.352122] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61440) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2386.352303] env[61440]: DEBUG nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2386.352469] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2386.586644] env[61440]: DEBUG oslo_service.periodic_task [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61440) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.769083] env[61440]: DEBUG nova.network.neutron [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2386.797891] env[61440]: INFO nova.compute.manager [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Took 0.45 seconds to deallocate network for instance. 
[ 2386.988377] env[61440]: INFO nova.scheduler.client.report [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Deleted allocations for instance abbd5d5b-7821-435d-ac56-0d070ff08043 [ 2387.014156] env[61440]: DEBUG oslo_concurrency.lockutils [None req-597f7452-0392-43c8-96bf-b0f0dcb3f996 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 598.304s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.014490] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 401.956s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2387.014771] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Acquiring lock "abbd5d5b-7821-435d-ac56-0d070ff08043-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.015112] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2387.015400] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.019653] env[61440]: INFO nova.compute.manager [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Terminating instance [ 2387.021383] env[61440]: DEBUG nova.compute.manager [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Start destroying the instance on the hypervisor. 
{{(pid=61440) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2387.021579] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Destroying instance {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2387.022059] env[61440]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4e7aa3a-15f7-4411-981d-44e29409d40b {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.033462] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea1da33-50c4-4a9f-b5b1-4522de8edb30 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.059354] env[61440]: WARNING nova.virt.vmwareapi.vmops [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance abbd5d5b-7821-435d-ac56-0d070ff08043 could not be found. [ 2387.059466] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Instance destroyed {{(pid=61440) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2387.059716] env[61440]: INFO nova.compute.manager [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2387.059962] env[61440]: DEBUG oslo.service.loopingcall [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2387.060206] env[61440]: DEBUG nova.compute.manager [-] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Deallocating network for instance {{(pid=61440) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2387.060305] env[61440]: DEBUG nova.network.neutron [-] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] deallocate_for_instance() {{(pid=61440) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2387.097456] env[61440]: DEBUG nova.network.neutron [-] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Updating instance_info_cache with network_info: [] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2387.105978] env[61440]: INFO nova.compute.manager [-] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] Took 0.05 seconds to deallocate network for instance. 
[ 2387.254651] env[61440]: DEBUG oslo_concurrency.lockutils [None req-ca9de3ad-fc6f-43fe-9927-c9f79ed0b6d0 tempest-ServerDiskConfigTestJSON-1819225197 tempest-ServerDiskConfigTestJSON-1819225197-project-member] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.240s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.255474] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 9.633s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2387.256128] env[61440]: INFO nova.compute.manager [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] [instance: abbd5d5b-7821-435d-ac56-0d070ff08043] During sync_power_state the instance has a pending task (deleting). Skip. [ 2387.256128] env[61440]: DEBUG oslo_concurrency.lockutils [None req-711ca2c0-9694-4d1d-aed3-727b6c8263cf None None] Lock "abbd5d5b-7821-435d-ac56-0d070ff08043" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2400.382327] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "dca18a02-d03c-4a8d-b838-c73954710601" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2400.383289] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "dca18a02-d03c-4a8d-b838-c73954710601" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2400.392616] env[61440]: DEBUG nova.compute.manager [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Starting instance... 
{{(pid=61440) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2400.447327] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2400.447611] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2400.449109] env[61440]: INFO nova.compute.claims [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2400.562522] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f18cd1-75db-4dcd-8895-faa90fb6213d {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.570530] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eee4d71-fcda-4529-b43c-1953c456ee09 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.599184] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dfa4c1-4ea9-4a7d-aeff-acc3cfbb2a09 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.606090] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbf74e2-6e1e-4e9b-afca-928513cbd632 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.618818] env[61440]: DEBUG nova.compute.provider_tree [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed in ProviderTree for provider: 9fb487e1-32f6-4c78-bc1f-37162b31d3aa {{(pid=61440) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2400.628308] env[61440]: DEBUG nova.scheduler.client.report [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Inventory has not changed for provider 9fb487e1-32f6-4c78-bc1f-37162b31d3aa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61440) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2400.641869] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 
[ 2400.642398] env[61440]: DEBUG nova.compute.manager [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Start building networks asynchronously for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 2400.673276] env[61440]: DEBUG nova.compute.utils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Using /dev/sd instead of None {{(pid=61440) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2400.674577] env[61440]: DEBUG nova.compute.manager [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Allocating IP information in the background. {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 2400.674783] env[61440]: DEBUG nova.network.neutron [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] allocate_for_instance() {{(pid=61440) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2400.683913] env[61440]: DEBUG nova.compute.manager [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Start building block device mappings for instance. {{(pid=61440) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 2400.740657] env[61440]: DEBUG nova.policy [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8be059e7fab4a84b58f00f1490fdb41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4eab358aa42d42659e93d2ead48ed0a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61440) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2400.744980] env[61440]: DEBUG nova.compute.manager [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Start spawning the instance on the hypervisor. {{(pid=61440) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
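
The nova.policy line above records a failed (but non-fatal) check of network:attach_external_network against the requester's credentials: the caller holds only the reader and member roles, so the rule does not pass and Nova simply logs it. A hedged sketch of such a check using oslo.policy directly; the rule default and credentials shown are illustrative, not Nova's registered defaults:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Assumed default for illustration; Nova registers its own defaults.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '4eab358aa42d42659e93d2ead48ed0a9'}
    # Returns False for this non-admin caller; with do_raise left at its
    # default, a failed check is reported, not raised.
    print(enforcer.enforce('network:attach_external_network', {}, creds))
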
[ 2400.782184] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T01:20:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T01:20:20Z,direct_url=<?>,disk_format='vmdk',id=5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4dd68985b0414373a45d24938e7b9731',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T01:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2400.782436] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Flavor limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2400.782595] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Image limits 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2400.782777] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Flavor pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2400.782924] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Image pref 0:0:0 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2400.783084] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61440) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2400.783297] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2400.783452] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2400.783615] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Got 1 possible topologies {{(pid=61440) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
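
The nova.virt.hardware lines walk the flavor and image CPU-topology limits and preferences (all unset here, hence 0:0:0) and then enumerate every (sockets, cores, threads) combination whose product matches the vCPU count. A self-contained sketch of that enumeration, not Nova's exact loop:

    import itertools

    # Find all (sockets, cores, threads) whose product equals the vCPU
    # count, subject to the (here effectively unlimited) maxima from the
    # "limits were sockets=65536, cores=65536, threads=65536" line.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log
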
[ 2400.783777] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2400.783954] env[61440]: DEBUG nova.virt.hardware [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61440) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2400.785465] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6f119c-4ac8-4c1f-bb6d-06888efbbe3a {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2400.793104] env[61440]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65620a18-d33f-48f8-8ce8-539f23452d05 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2401.186982] env[61440]: DEBUG nova.network.neutron [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Successfully created port: e73571f3-53e2-472f-8cde-40b5ce04b939 {{(pid=61440) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2401.969777] env[61440]: DEBUG nova.compute.manager [req-6162232b-19d2-42dd-97c9-6c2433b1f5b0 req-5100df0e-64fc-4b29-9114-f5258b4e18ff service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Received event network-vif-plugged-e73571f3-53e2-472f-8cde-40b5ce04b939 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 2401.970159] env[61440]: DEBUG oslo_concurrency.lockutils [req-6162232b-19d2-42dd-97c9-6c2433b1f5b0 req-5100df0e-64fc-4b29-9114-f5258b4e18ff service nova] Acquiring lock "dca18a02-d03c-4a8d-b838-c73954710601-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2401.970677] env[61440]: DEBUG oslo_concurrency.lockutils [req-6162232b-19d2-42dd-97c9-6c2433b1f5b0 req-5100df0e-64fc-4b29-9114-f5258b4e18ff service nova] Lock "dca18a02-d03c-4a8d-b838-c73954710601-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2401.970677] env[61440]: DEBUG oslo_concurrency.lockutils [req-6162232b-19d2-42dd-97c9-6c2433b1f5b0 req-5100df0e-64fc-4b29-9114-f5258b4e18ff service nova] Lock "dca18a02-d03c-4a8d-b838-c73954710601-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61440) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2401.970677] env[61440]: DEBUG nova.compute.manager [req-6162232b-19d2-42dd-97c9-6c2433b1f5b0 req-5100df0e-64fc-4b29-9114-f5258b4e18ff service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] No waiting events found dispatching network-vif-plugged-e73571f3-53e2-472f-8cde-40b5ce04b939 {{(pid=61440) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
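
"No waiting events found" means the network-vif-plugged event from Neutron arrived before the compute thread registered a waiter for it, so it is logged as unexpected rather than dispatched. An illustrative sketch of that registry pattern; the names and data structure are ours, not Nova's:

    import threading

    # (instance_uuid, event_key) -> threading.Event
    _waiters = {}

    def prepare_for_event(uuid, key):
        ev = threading.Event()
        _waiters[(uuid, key)] = ev
        return ev  # the compute thread later blocks in ev.wait()

    def pop_instance_event(uuid, key):
        ev = _waiters.pop((uuid, key), None)
        if ev is None:
            # Mirrors the "No waiting events found dispatching ..." and
            # "Received unexpected event ..." lines in the log.
            print(f'unexpected event {key}')
            return
        ev.set()  # wakes the waiting compute thread

    # The event arrives before anyone called prepare_for_event():
    pop_instance_event('dca18a02', 'network-vif-plugged-e73571f3')
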
[ 2401.971086] env[61440]: WARNING nova.compute.manager [req-6162232b-19d2-42dd-97c9-6c2433b1f5b0 req-5100df0e-64fc-4b29-9114-f5258b4e18ff service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Received unexpected event network-vif-plugged-e73571f3-53e2-472f-8cde-40b5ce04b939 for instance with vm_state building and task_state spawning.
[ 2402.025302] env[61440]: DEBUG nova.network.neutron [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Successfully updated port: e73571f3-53e2-472f-8cde-40b5ce04b939 {{(pid=61440) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2402.036689] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "refresh_cache-dca18a02-d03c-4a8d-b838-c73954710601" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2402.036834] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "refresh_cache-dca18a02-d03c-4a8d-b838-c73954710601" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2402.036987] env[61440]: DEBUG nova.network.neutron [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Building network info cache for instance {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2402.085640] env[61440]: DEBUG nova.network.neutron [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Instance cache missing network info. {{(pid=61440) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2402.265996] env[61440]: DEBUG nova.network.neutron [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Updating instance_info_cache with network_info: [{"id": "e73571f3-53e2-472f-8cde-40b5ce04b939", "address": "fa:16:3e:7e:ca:fd", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73571f3-53", "ovs_interfaceid": "e73571f3-53e2-472f-8cde-40b5ce04b939", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2402.278979] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "refresh_cache-dca18a02-d03c-4a8d-b838-c73954710601" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2402.279287] env[61440]: DEBUG nova.compute.manager [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Instance network_info: |[{"id": "e73571f3-53e2-472f-8cde-40b5ce04b939", "address": "fa:16:3e:7e:ca:fd", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73571f3-53", "ovs_interfaceid": "e73571f3-53e2-472f-8cde-40b5ce04b939", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61440) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
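
The network_info payload above is a list of VIF dicts; the VMware driver reads only a handful of its keys when it builds the "Instance VIF info" that follows. A hypothetical walk over a copy of the structure trimmed to those keys:

    # Trimmed to the keys used below; the full record is in the log above.
    network_info = [{
        'id': 'e73571f3-53e2-472f-8cde-40b5ce04b939',
        'address': 'fa:16:3e:7e:ca:fd',
        'details': {'nsx-logical-switch-id':
                    '86a35d07-53d3-46b3-92cb-ae34236c0f41'},
        'network': {'bridge': 'br-int', 'subnets': [
            {'ips': [{'address': '192.168.128.5'}]}]},
    }]

    for vif in network_info:
        ip = vif['network']['subnets'][0]['ips'][0]['address']
        # port id, MAC, fixed IP and the NSX switch backing the port
        print(vif['id'], vif['address'], ip,
              vif['details']['nsx-logical-switch-id'])
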
[ 2402.279687] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:ca:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e73571f3-53e2-472f-8cde-40b5ce04b939', 'vif_model': 'vmxnet3'}] {{(pid=61440) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2402.287337] env[61440]: DEBUG oslo.service.loopingcall [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61440) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2402.287771] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Creating VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2402.287991] env[61440]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6606096-0d92-49be-b831-318bc43e29fc {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2402.307816] env[61440]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2402.307816] env[61440]:     value = "task-4281432"
[ 2402.307816] env[61440]:     _type = "Task"
[ 2402.307816] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2402.315130] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281432, 'name': CreateVM_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2402.817924] env[61440]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281432, 'name': CreateVM_Task, 'duration_secs': 0.302289} completed successfully. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
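
wait_for_task drives the "progress is 0%." and "completed successfully." lines above by re-polling the vCenter task object until it leaves the running states. A self-contained stand-in for that loop; fetch_task_info substitutes for oslo.vmware's real PropertyCollector read, so this is a sketch of the polling shape, not the library's implementation:

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        while True:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # Mirrors the "Task: {...} progress is N%." log line.
            print(f"Task {info['key']} progress is {info['progress']}%.")
            time.sleep(poll_interval)

    # Simulated task that finishes on the second poll:
    states = iter([
        {'key': 'task-4281432', 'state': 'running', 'progress': 0},
        {'key': 'task-4281432', 'state': 'success'},
    ])
    wait_for_task(lambda: next(states), poll_interval=0)
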
[ 2402.818107] env[61440]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Created VM on the ESX host {{(pid=61440) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2402.818747] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2402.818911] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2402.819277] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2402.819542] env[61440]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30439c2b-d18c-4ddf-805c-1d871ec7c423 {{(pid=61440) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2402.823982] env[61440]: DEBUG oslo_vmware.api [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Waiting for the task: (returnval){
[ 2402.823982] env[61440]:     value = "session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ea72f5-a056-a999-4f39-cbe3f2c457d6"
[ 2402.823982] env[61440]:     _type = "Task"
[ 2402.823982] env[61440]: } to complete. {{(pid=61440) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2402.832961] env[61440]: DEBUG oslo_vmware.api [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Task: {'id': session[5248723f-11b8-4af7-fa8a-b9f93d1be5df]52ea72f5-a056-a999-4f39-cbe3f2c457d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61440) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
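
The "[datastore2] devstack-image-cache_base/<image-id>" locks above serialize concurrent spawns against the shared image cache, so only one request fetches a given VMDK while the rest wait and then find it already cached. A sketch of the same idea using lockutils' in-process lock (the log additionally shows an external semaphore, which extends the exclusion across workers on the host):

    from oslo_concurrency import lockutils

    image_id = '5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5'
    lock_name = f'[datastore2] devstack-image-cache_base/{image_id}'

    with lockutils.lock(lock_name):
        # Placeholder body: search the datastore for the cached VMDK
        # (SearchDatastore_Task above) and fetch it from Glance only if
        # it is missing.
        pass
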
[ 2403.334764] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2403.335078] env[61440]: DEBUG nova.virt.vmwareapi.vmops [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Processing image 5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5 {{(pid=61440) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2403.335262] env[61440]: DEBUG oslo_concurrency.lockutils [None req-5f838e53-d777-49bb-ade6-b43bdce1ff21 tempest-ServersTestJSON-678514262 tempest-ServersTestJSON-678514262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5/5a2cfd76-01a6-41c4-b1ca-0d55e896c6a5.vmdk" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2404.003204] env[61440]: DEBUG nova.compute.manager [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Received event network-changed-e73571f3-53e2-472f-8cde-40b5ce04b939 {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 2404.003408] env[61440]: DEBUG nova.compute.manager [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Refreshing instance network info cache due to event network-changed-e73571f3-53e2-472f-8cde-40b5ce04b939. {{(pid=61440) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 2404.003625] env[61440]: DEBUG oslo_concurrency.lockutils [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] Acquiring lock "refresh_cache-dca18a02-d03c-4a8d-b838-c73954710601" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2404.003767] env[61440]: DEBUG oslo_concurrency.lockutils [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] Acquired lock "refresh_cache-dca18a02-d03c-4a8d-b838-c73954710601" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2404.003935] env[61440]: DEBUG nova.network.neutron [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Refreshing network info cache for port e73571f3-53e2-472f-8cde-40b5ce04b939 {{(pid=61440) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2404.256036] env[61440]: DEBUG nova.network.neutron [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Updated VIF entry in instance network info cache for port e73571f3-53e2-472f-8cde-40b5ce04b939. {{(pid=61440) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
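
The network-changed handling above re-reads the port from Neutron under the instance's refresh_cache lock and rewrites the cached VIF entry. An illustrative, self-contained version of that flow; FakeNeutron, the cache dict, and the function name all stand in for the real client and Nova's cache machinery:

    from oslo_concurrency import lockutils

    class FakeNeutron:  # stand-in for the Neutron API client
        def show_port(self, port_id):
            return {'port': {'id': port_id,
                             'mac_address': 'fa:16:3e:7e:ca:fd'}}

    def refresh_network_info(cache, neutron, instance_uuid, port_id):
        # Serialize against other cache writers, as the
        # refresh_cache-<uuid> lock lines above do.
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            port = neutron.show_port(port_id)['port']
            for vif in cache.setdefault(instance_uuid, []):
                if vif['id'] == port_id:
                    vif.update(address=port['mac_address'])  # updated VIF
                    break
            else:
                cache[instance_uuid].append(
                    {'id': port_id, 'address': port['mac_address']})

    cache = {}
    refresh_network_info(cache, FakeNeutron(), 'dca18a02', 'e73571f3')
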
[ 2404.256430] env[61440]: DEBUG nova.network.neutron [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] [instance: dca18a02-d03c-4a8d-b838-c73954710601] Updating instance_info_cache with network_info: [{"id": "e73571f3-53e2-472f-8cde-40b5ce04b939", "address": "fa:16:3e:7e:ca:fd", "network": {"id": "4bffe8b8-5e5b-4773-8fb2-8bf0ea4f35b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1763091615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eab358aa42d42659e93d2ead48ed0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73571f3-53", "ovs_interfaceid": "e73571f3-53e2-472f-8cde-40b5ce04b939", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61440) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2404.271171] env[61440]: DEBUG oslo_concurrency.lockutils [req-721568e3-4efb-4c13-a8da-4ad9b1aa235d req-e9b8b6a4-9ec0-4870-9863-a85404c0835a service nova] Releasing lock "refresh_cache-dca18a02-d03c-4a8d-b838-c73954710601" {{(pid=61440) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}